Add RTP implementation as plugin
This commit is contained in:
parent
e6a933ad30
commit
ef2e3c774c
|
@ -6,6 +6,10 @@ if(DINO_PLUGIN_ENABLED_ice)
|
|||
add_subdirectory(ice)
|
||||
endif(DINO_PLUGIN_ENABLED_ice)
|
||||
|
||||
if(DINO_PLUGIN_ENABLED_rtp)
|
||||
add_subdirectory(rtp)
|
||||
endif(DINO_PLUGIN_ENABLED_rtp)
|
||||
|
||||
if(DINO_PLUGIN_ENABLED_openpgp)
|
||||
add_subdirectory(gpgme-vala)
|
||||
add_subdirectory(openpgp)
|
||||
|
|
|
@ -81,12 +81,6 @@ public class HttpFileSender : FileSender, Object {
|
|||
}
|
||||
}
|
||||
|
||||
public async long get_max_file_size(Account account) {
|
||||
lock (max_file_sizes) {
|
||||
return max_file_sizes[account];
|
||||
}
|
||||
}
|
||||
|
||||
private static void transfer_more_bytes(InputStream stream, Soup.MessageBody body) {
|
||||
uint8[] bytes = new uint8[4096];
|
||||
ssize_t read = stream.read(bytes);
|
||||
|
|
36
plugins/rtp/CMakeLists.txt
Normal file
36
plugins/rtp/CMakeLists.txt
Normal file
|
@ -0,0 +1,36 @@
|
|||
# Build configuration for the Dino RTP (voice/video call) plugin.

# Resolve pkg-config dependencies (Dino's find_packages macro); results land in RTP_PACKAGES.
find_packages(RTP_PACKAGES REQUIRED
    Gee
    GLib
    GModule
    GObject
    GTK3
    Gst
    GstApp
)

# Transpile the plugin's Vala sources to C.
# NOTE(review): this commit also adds src/participant.vala, which is not listed
# in SOURCES — confirm it is intentionally excluded from the build.
vala_precompile(RTP_VALA_C
SOURCES
    src/codec_util.vala
    src/device.vala
    src/module.vala
    src/plugin.vala
    src/stream.vala
    src/video_widget.vala
    src/register_plugin.vala
CUSTOM_VAPIS
    # Bindings exported by the in-tree libraries the plugin links against.
    ${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi
    ${CMAKE_BINARY_DIR}/exports/dino.vapi
    ${CMAKE_BINARY_DIR}/exports/qlite.vapi
PACKAGES
    ${RTP_PACKAGES}
OPTIONS
    # Plugin-local vapi overrides/additions.
    --vapidir=${CMAKE_CURRENT_SOURCE_DIR}/vapi
)

# Scope GLib logging from this plugin under the "rtp" domain.
add_definitions(${VALA_CFLAGS} -DG_LOG_DOMAIN="rtp")
add_library(rtp SHARED ${RTP_VALA_C})
target_link_libraries(rtp libdino ${RTP_PACKAGES})
# Drop the "lib" prefix: the plugin loader opens "rtp" by that exact name.
set_target_properties(rtp PROPERTIES PREFIX "")
# Collect all plugin modules in one directory for the in-tree run/install layout.
set_target_properties(rtp PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/plugins/)

install(TARGETS rtp ${PLUGIN_INSTALL})
|
245
plugins/rtp/src/codec_util.vala
Normal file
245
plugins/rtp/src/codec_util.vala
Normal file
|
@ -0,0 +1,245 @@
|
|||
using Gee;
|
||||
using Xmpp;
|
||||
using Xmpp.Xep;
|
||||
|
||||
public class Dino.Plugins.Rtp.CodecUtil {
    // Cache of GStreamer element availability, so each factory lookup in
    // is_element_supported() is performed at most once per element name.
    private Set<string> supported_elements = new HashSet<string>();
    private Set<string> unsupported_elements = new HashSet<string>();

    // Build the application/x-rtp caps describing the given payload type.
    // clock-rate and encoding-name are only set when the payload provides them.
    public static Gst.Caps get_caps(string media, JingleRtp.PayloadType payload_type) {
        Gst.Caps caps = new Gst.Caps.simple("application/x-rtp",
                "media", typeof(string), media,
                "payload", typeof(int), payload_type.id);
        //"channels", typeof(int), payloadType.channels,
        //"max-ptime", typeof(int), payloadType.maxptime);
        unowned Gst.Structure s = caps.get_structure(0);
        if (payload_type.clockrate != 0) {
            s.set("clock-rate", typeof(int), payload_type.clockrate);
        }
        if (payload_type.name != null) {
            s.set("encoding-name", typeof(string), payload_type.name.up());
        }
        return caps;
    }

    // Lower-cased codec name for a payload type; falls back to the static
    // RTP payload id assignments (0 = PCMU, 8 = PCMA) when no name is given.
    public static string? get_codec_from_payload(string media, JingleRtp.PayloadType payload_type) {
        if (payload_type.name != null) return payload_type.name.down();
        if (media == "audio") {
            switch (payload_type.id) {
                case 0:
                    return "pcmu";
                case 8:
                    return "pcma";
            }
        }
        return null;
    }

    public static string? get_media_type_from_payload(string media, JingleRtp.PayloadType payload_type) {
        return get_media_type(media, get_codec_from_payload(media, payload_type));
    }

    // GStreamer media type (caps name) for a codec, e.g. "video/x-vp8".
    // A-law/µ-law have non-uniform caps names and are special-cased.
    public static string? get_media_type(string media, string? codec) {
        if (codec == null) return null;
        if (media == "audio") {
            switch (codec) {
                case "pcma":
                    return "audio/x-alaw";
                case "pcmu":
                    return "audio/x-mulaw";
            }
        }
        return @"$media/x-$codec";
    }

    public static string? get_rtp_pay_element_name_from_payload(string media, JingleRtp.PayloadType payload_type) {
        return get_pay_candidate(media, get_codec_from_payload(media, payload_type));
    }

    // RTP payloader element name by convention, e.g. "rtpvp8pay".
    public static string? get_pay_candidate(string media, string? codec) {
        if (codec == null) return null;
        return @"rtp$(codec)pay";
    }

    public static string? get_rtp_depay_element_name_from_payload(string media, JingleRtp.PayloadType payload_type) {
        return get_depay_candidate(media, get_codec_from_payload(media, payload_type));
    }

    // RTP depayloader element name by convention, e.g. "rtpvp8depay".
    public static string? get_depay_candidate(string media, string? codec) {
        if (codec == null) return null;
        return @"rtp$(codec)depay";
    }

    // Encoder element candidates in order of preference (hardware first).
    public static string[] get_encode_candidates(string media, string? codec) {
        if (codec == null) return new string[0];
        if (media == "audio") {
            switch (codec) {
                case "opus":
                    return new string[] {"opusenc"};
                case "speex":
                    return new string[] {"speexenc"};
                case "pcma":
                    return new string[] {"alawenc"};
                case "pcmu":
                    return new string[] {"mulawenc"};
            }
        } else if (media == "video") {
            switch (codec) {
                case "h264":
                    return new string[] {/*"msdkh264enc", */"vaapih264enc", "x264enc"};
                case "vp9":
                    return new string[] {/*"msdkvp9enc", */"vaapivp9enc" /*, "vp9enc" */};
                case "vp8":
                    return new string[] {/*"msdkvp8enc", */"vaapivp8enc", "vp8enc"};
            }
        }
        return new string[0];
    }

    // Decoder element candidates in order of preference (hardware first).
    public static string[] get_decode_candidates(string media, string? codec) {
        if (codec == null) return new string[0];
        if (media == "audio") {
            switch (codec) {
                case "opus":
                    return new string[] {"opusdec"};
                case "speex":
                    return new string[] {"speexdec"};
                case "pcma":
                    return new string[] {"alawdec"};
                case "pcmu":
                    return new string[] {"mulawdec"};
            }
        } else if (media == "video") {
            switch (codec) {
                case "h264":
                    return new string[] {/*"msdkh264dec", */"vaapih264dec"};
                case "vp9":
                    return new string[] {/*"msdkvp9dec", */"vaapivp9dec", "vp9dec"};
                case "vp8":
                    return new string[] {/*"msdkvp8dec", */"vaapivp8dec", "vp8dec"};
            }
        }
        return new string[0];
    }

    // Pipeline fragment placed *before* the encoder (includes trailing " ! ").
    public static string? get_encode_prefix(string media, string codec, string encode) {
        if (encode == "msdkh264enc") return "video/x-raw,format=NV12 ! ";
        if (encode == "vaapih264enc") return "video/x-raw,format=NV12 ! ";
        return null;
    }

    // Encoder properties and pipeline fragment placed *after* the encoder
    // (low-latency / constrained-baseline tuning for calls).
    public static string? get_encode_suffix(string media, string codec, string encode) {
        // H264
        const string h264_suffix = " ! video/x-h264,profile=constrained-baseline ! h264parse";
        if (encode == "msdkh264enc") return @" bitrate=256 rate-control=vbr target-usage=7$h264_suffix";
        if (encode == "vaapih264enc") return @" bitrate=256 quality-level=7 tune=low-power$h264_suffix";
        if (encode == "x264enc") return @" byte-stream=1 bitrate=256 profile=baseline speed-preset=ultrafast tune=zerolatency$h264_suffix";
        if (media == "video" && codec == "h264") return h264_suffix;

        // VP8
        if (encode == "msdkvp8enc") return " bitrate=256 rate-control=vbr target-usage=7";
        if (encode == "vaapivp8enc") return " bitrate=256 rate-control=vbr quality-level=7";
        if (encode == "vp8enc") return " target-bitrate=256000 deadline=1 error-resilient=1";

        // OPUS
        if (encode == "opusenc") return " audio-type=voice";

        return null;
    }

    public static string? get_decode_prefix(string media, string codec, string decode) {
        return null;
    }

    // Probe (and cache) whether a GStreamer element can be instantiated on
    // this platform.
    public bool is_element_supported(string element_name) {
        if (unsupported_elements.contains(element_name)) return false;
        if (supported_elements.contains(element_name)) return true;
        var test_element = Gst.ElementFactory.make(element_name, @"test-$element_name");
        if (test_element != null) {
            supported_elements.add(element_name);
            return true;
        } else {
            debug("%s is not supported on this platform", element_name);
            unsupported_elements.add(element_name);
            return false;
        }
    }

    // First supported encoder candidate, or null if none is available.
    public string? get_encode_element_name(string media, string? codec) {
        foreach (string candidate in get_encode_candidates(media, codec)) {
            if (is_element_supported(candidate)) return candidate;
        }
        return null;
    }

    public string? get_pay_element_name(string media, string? codec) {
        string candidate = get_pay_candidate(media, codec);
        if (is_element_supported(candidate)) return candidate;
        return null;
    }

    // First supported decoder candidate, or null if none is available.
    public string? get_decode_element_name(string media, string? codec) {
        foreach (string candidate in get_decode_candidates(media, codec)) {
            if (is_element_supported(candidate)) return candidate;
        }
        return null;
    }

    public string? get_depay_element_name(string media, string? codec) {
        string candidate = get_depay_candidate(media, codec);
        if (is_element_supported(candidate)) return candidate;
        return null;
    }

    // Blacklist an element after it failed in practice (see Module.supports()).
    public void mark_element_unsupported(string element_name) {
        unsupported_elements.add(element_name);
    }

    // gst_parse-style description "depay ! decoder ! convert" for receiving,
    // or null when no suitable depayloader/decoder exists.
    public string? get_decode_bin_description(string media, string? codec, string? element_name = null, string? name = null) {
        if (codec == null) return null;
        // Fixed: the default base name used to say "encode-", which gave
        // decoder bins misleading encode-* element names.
        string base_name = name ?? @"decode-$codec-$(Random.next_int())";
        string depay = get_depay_element_name(media, codec);
        string decode = element_name ?? get_decode_element_name(media, codec);
        if (depay == null || decode == null) return null;
        string decode_prefix = get_decode_prefix(media, codec, decode) ?? "";
        return @"$depay name=$base_name-rtp-depay ! $decode_prefix$decode name=$base_name-decode ! $(media)convert name=$base_name-convert";
    }

    // Parsed decode bin for the payload type, or null when unsupported.
    public Gst.Element? get_decode_bin(string media, JingleRtp.PayloadType payload_type, string? name = null) {
        string? codec = get_codec_from_payload(media, payload_type);
        string base_name = name ?? @"decode-$codec-$(Random.next_int())";
        string? desc = get_decode_bin_description(media, codec, null, base_name);
        if (desc == null) return null;
        debug("Pipeline to decode %s %s: %s", media, codec, desc);
        Gst.Element bin = Gst.parse_bin_from_description(desc, true);
        bin.name = name;
        return bin;
    }

    // gst_parse-style description "convert ! encoder ! payloader" for sending,
    // or null when no suitable encoder/payloader exists.
    public string? get_encode_bin_description(string media, string? codec, string? element_name = null, uint pt = 96, string? name = null) {
        if (codec == null) return null;
        string base_name = name ?? @"encode-$codec-$(Random.next_int())";
        string pay = get_pay_element_name(media, codec);
        string encode = element_name ?? get_encode_element_name(media, codec);
        if (pay == null || encode == null) return null;
        string encode_prefix = get_encode_prefix(media, codec, encode) ?? "";
        string encode_suffix = get_encode_suffix(media, codec, encode) ?? "";
        if (media == "audio") {
            // Audio additionally needs resampling to match the encoder's rate.
            return @"audioconvert name=$base_name-convert ! audioresample name=$base_name-resample ! $encode_prefix$encode$encode_suffix ! $pay pt=$pt name=$base_name-rtp-pay";
        } else {
            return @"$(media)convert name=$base_name-convert ! $encode_prefix$encode$encode_suffix ! $pay pt=$pt name=$base_name-rtp-pay";
        }
    }

    // Parsed encode bin for the payload type, or null when unsupported.
    public Gst.Element? get_encode_bin(string media, JingleRtp.PayloadType payload_type, string? name = null) {
        string? codec = get_codec_from_payload(media, payload_type);
        string base_name = name ?? @"encode-$codec-$(Random.next_int())";
        string? desc = get_encode_bin_description(media, codec, null, payload_type.id, base_name);
        if (desc == null) return null;
        debug("Pipeline to encode %s %s: %s", media, codec, desc);
        Gst.Element bin = Gst.parse_bin_from_description(desc, true);
        bin.name = name;
        return bin;
    }

}
|
206
plugins/rtp/src/device.vala
Normal file
206
plugins/rtp/src/device.vala
Normal file
|
@ -0,0 +1,206 @@
|
|||
// Wraps a Gst.Device (camera, microphone, speaker) and manages lazily adding
// its element (plus capsfilter/dsp/tee plumbing) to the plugin's shared
// pipeline, reference-counted via link_sink()/link_source()/unlink().
public class Dino.Plugins.Rtp.Device : MediaDevice, Object {
    public Plugin plugin { get; private set; }
    public Gst.Device device { get; private set; }

    // Name/display name are copied in update() so they survive device changes.
    private string device_name;
    public string id { get {
        return device_name;
    }}
    private string device_display_name;
    public string display_name { get {
        return device_display_name;
    }}
    // Human-readable card name; falls back to ALSA id, then to the raw id.
    public string detail_name { get {
        return device.properties.get_string("alsa.card_name") ?? device.properties.get_string("alsa.id") ?? id;
    }}
    // The plugin-wide pipeline all devices are added to.
    public Gst.Pipeline pipe { get {
        return plugin.pipe;
    }}
    // "audio"/"video" derived from the GStreamer device class, null otherwise.
    public string? media { get {
        if (device.device_class.has_prefix("Audio/")) {
            return "audio";
        } else if (device.device_class.has_prefix("Video/")) {
            return "video";
        } else {
            return null;
        }
    }}
    public bool is_source { get {
        return device.device_class.has_suffix("/Source");
    }}
    public bool is_sink { get {
        return device.device_class.has_suffix("/Sink");
    }}

    // Pipeline members; non-null only while the device is in use (links > 0).
    private Gst.Element element;
    private Gst.Element tee;      // fan-out for sources, allows multiple consumers
    private Gst.Element dsp;      // webrtcdsp echo cancellation (audio sources)
    private Gst.Element mixer;    // currently unused (see create()/destroy())
    private Gst.Element filter;   // capsfilter restricting source output format
    private int links = 0;        // consumer reference count

    public Device(Plugin plugin, Gst.Device device) {
        this.plugin = plugin;
        update(device);
    }

    // Identity is by device name only.
    public bool matches(Gst.Device device) {
        if (this.device.name == device.name) return true;
        return false;
    }

    // Refresh cached metadata when GStreamer reports the device (again).
    public void update(Gst.Device device) {
        this.device = device;
        this.device_name = device.name;
        this.device_display_name = device.display_name;
    }

    // Returns the element a consumer should link *to* when playing into this
    // device (sink direction). Creates the pipeline elements on first use.
    public Gst.Element? link_sink() {
        if (element == null) create();
        links++;
        if (mixer != null) return mixer;
        // Audio playback is routed through the echo probe so the DSP can
        // subtract it from captured audio.
        if (is_sink && media == "audio") return plugin.echoprobe;
        return element;
    }

    // Returns the element a consumer should link *from* when capturing from
    // this device (source direction). Creates the pipeline elements on first use.
    public Gst.Element? link_source() {
        if (element == null) create();
        links++;
        if (tee != null) return tee;
        return element;
    }

    // Drop one consumer reference; tear everything down at zero.
    public void unlink() {
        if (links <= 0) {
            critical("Link count below zero.");
            return;
        }
        links--;
        if (links == 0) {
            destroy();
        }
    }

    // Pick the caps to request from a source: fixed mono/48kHz for audio;
    // for video, the advertised raw mode with highest fps, then width, then height.
    private Gst.Caps get_best_caps() {
        if (media == "audio") {
            return Gst.Caps.from_string("audio/x-raw,rate=48000,channels=1");
        } else if (media == "video" && device.caps.get_size() > 0) {
            int best_index = 0;
            int best_fps = 0;
            int best_width = 0;
            int best_height = 0;
            for (int i = 0; i < device.caps.get_size(); i++) {
                unowned Gst.Structure? that = device.caps.get_structure(i);
                if (!that.has_name("video/x-raw")) continue;
                int num = 0, den = 0, width = 0, height = 0;
                if (!that.has_field("framerate") || !that.get_fraction("framerate", out num, out den)) continue;
                if (!that.has_field("width") || !that.get_int("width", out width)) continue;
                if (!that.has_field("height") || !that.get_int("height", out height)) continue;
                int fps = num/den;
                if (best_fps < fps || best_fps == fps && best_width < width || best_fps == fps && best_width == width && best_height < height) {
                    best_fps = fps;
                    best_width = width;
                    best_height = height;
                    best_index = i;
                }
            }
            return device.caps.copy_nth(best_index);
        } else if (device.caps.get_size() > 0) {
            return device.caps.copy_nth(0);
        } else {
            return new Gst.Caps.any();
        }
    }

    // Build and link the device's elements into the shared pipeline.
    // Sources: element -> capsfilter [-> webrtcdsp] -> tee.
    // Audio sinks additionally get fed from the plugin's echo probe.
    // The pipeline is paused around modifications via plugin.pause()/unpause().
    private void create() {
        debug("Creating device %s", id);
        plugin.pause();
        element = device.create_element(id);
        pipe.add(element);
        if (is_source) {
            filter = Gst.ElementFactory.make("capsfilter", @"$id-caps-filter");
            filter.@set("caps", get_best_caps());
            pipe.add(filter);
            element.link(filter);
            if (media == "audio") {
                // Echo cancellation; paired with the plugin-wide echo probe.
                dsp = Gst.ElementFactory.make("webrtcdsp", @"$id-dsp");
                dsp.@set("probe", plugin.echoprobe.name);
                pipe.add(dsp);
                filter.link(dsp);
            }
            tee = Gst.ElementFactory.make("tee", @"$id-tee");
            // Keep the source flowing even while no consumer is attached.
            tee.@set("allow-not-linked", true);
            pipe.add(tee);
            (dsp ?? filter).link(tee);
        }
        if (is_sink) {
            // Don't let sink timing stall the shared live pipeline.
            element.@set("async", false);
            element.@set("sync", false);
        }
        if (is_sink && media == "audio") {
            // mixer = Gst.ElementFactory.make("audiomixer", @"$id-mixer");
            // pipe.add(mixer);
            // mixer.link(plugin.echoprobe);
            plugin.echoprobe.link(element);
        }
        plugin.unpause();
    }

    // Tear down in reverse order: lock each element's state, drop it to NULL,
    // unlink, and remove it from the pipeline. Warns if consumers are still
    // attached to the mixer/tee pads at this point.
    private void destroy() {
        if (mixer != null) {
            if (is_sink && media == "audio") {
                plugin.echoprobe.unlink(mixer);
            }
            int linked_sink_pads = 0;
            mixer.foreach_sink_pad((_, pad) => {
                if (pad.is_linked()) linked_sink_pads++;
                return true;
            });
            if (linked_sink_pads > 0) {
                warning("%s-mixer still has %i sink pads while being destroyed", id, linked_sink_pads);
            }
            mixer.set_locked_state(true);
            mixer.set_state(Gst.State.NULL);
            mixer.unlink(element);
            pipe.remove(mixer);
            mixer = null;
        } else if (is_sink && media == "audio") {
            plugin.echoprobe.unlink(element);
        }
        element.set_locked_state(true);
        element.set_state(Gst.State.NULL);
        if (filter != null) element.unlink(filter);
        else if (is_source) element.unlink(tee);
        pipe.remove(element);
        element = null;
        if (filter != null) {
            filter.set_locked_state(true);
            filter.set_state(Gst.State.NULL);
            filter.unlink(dsp ?? tee);
            pipe.remove(filter);
            filter = null;
        }
        if (dsp != null) {
            dsp.set_locked_state(true);
            dsp.set_state(Gst.State.NULL);
            dsp.unlink(tee);
            pipe.remove(dsp);
            dsp = null;
        }
        if (tee != null) {
            int linked_src_pads = 0;
            tee.foreach_src_pad((_, pad) => {
                if (pad.is_linked()) linked_src_pads++;
                return true;
            });
            if (linked_src_pads != 0) {
                warning("%s-tee still has %d src pads while being destroyed", id, linked_src_pads);
            }
            tee.set_locked_state(true);
            tee.set_state(Gst.State.NULL);
            pipe.remove(tee);
            tee = null;
        }
        debug("Destroyed device %s", id);
    }
}
|
264
plugins/rtp/src/module.vala
Normal file
264
plugins/rtp/src/module.vala
Normal file
|
@ -0,0 +1,264 @@
|
|||
using Gee;
|
||||
using Xmpp;
|
||||
using Xmpp.Xep;
|
||||
|
||||
// Jingle RTP module implementation: answers codec capability queries by
// actually test-running GStreamer pipelines, and hands stream creation off
// to the plugin.
public class Dino.Plugins.Rtp.Module : JingleRtp.Module {
    // Codec probe results are cached for the lifetime of the module.
    private Set<string> supported_codecs = new HashSet<string>();
    private Set<string> unsupported_codecs = new HashSet<string>();
    public Plugin plugin { get; private set; }
    public CodecUtil codec_util { get {
        return plugin.codec_util;
    }}

    public Module(Plugin plugin) {
        base();
        this.plugin = plugin;
    }

    // Launch "<media>testsrc ! <element_desc> ! appsink" and report whether a
    // sample arrives. Resolves on the first sample, a pipeline error, or a
    // 2 s timeout — whichever fires first (the `finished` flag makes sure the
    // async callback is resumed exactly once).
    private async bool pipeline_works(string media, string element_desc) {
        var supported = false;
        string pipeline_desc = @"$(media)testsrc is-live=true ! $element_desc ! appsink name=output";
        try {
            var pipeline = Gst.parse_launch(pipeline_desc);
            var output = (pipeline as Gst.Bin).get_by_name("output") as Gst.App.Sink;
            SourceFunc callback = pipeline_works.callback;
            var finished = false;
            output.emit_signals = true;
            output.new_sample.connect(() => {
                if (!finished) {
                    finished = true;
                    supported = true;
                    // Resume on the main loop, not from the streaming thread.
                    Idle.add(() => {
                        callback();
                        return Source.REMOVE;
                    });
                }
                return Gst.FlowReturn.EOS;
            });
            pipeline.bus.add_watch(Priority.DEFAULT, (_, message) => {
                if (message.type == Gst.MessageType.ERROR && !finished) {
                    Error e;
                    string d;
                    message.parse_error(out e, out d);
                    debug("pipeline [%s] failed: %s", pipeline_desc, e.message);
                    debug(d);
                    finished = true;
                    callback();
                }
                return true;
            });
            Timeout.add(2000, () => {
                if (!finished) {
                    finished = true;
                    callback();
                }
                return Source.REMOVE;
            });
            pipeline.set_state(Gst.State.PLAYING);
            yield;
            pipeline.set_state(Gst.State.NULL);
        } catch (Error e) {
            debug("pipeline [%s] failed: %s", pipeline_desc, e.message);
        }
        return supported;
    }

    // Decide (and cache) whether we can both encode and decode this payload
    // type. Candidate elements that fail a live pipeline test are blacklisted
    // via codec_util.mark_element_unsupported and the next candidate is tried.
    private async bool supports(string media, JingleRtp.PayloadType payload_type) {
        string codec = CodecUtil.get_codec_from_payload(media, payload_type);
        if (codec == null) return false;
        if (unsupported_codecs.contains(codec)) return false;
        if (supported_codecs.contains(codec)) return true;

        string encode_element = codec_util.get_encode_element_name(media, codec);
        string decode_element = codec_util.get_decode_element_name(media, codec);
        if (encode_element == null || decode_element == null) {
            debug("No suitable encoder or decoder found for %s", codec);
            unsupported_codecs.add(codec);
            return false;
        }

        // Probe encoders until one works or the candidate list is exhausted.
        string encode_bin = codec_util.get_encode_bin_description(media, codec, encode_element);
        while (!(yield pipeline_works(media, encode_bin))) {
            debug("%s not suited for encoding %s", encode_element, codec);
            codec_util.mark_element_unsupported(encode_element);
            encode_element = codec_util.get_encode_element_name(media, codec);
            if (encode_element == null) {
                debug("No suitable encoder found for %s", codec);
                unsupported_codecs.add(codec);
                return false;
            }
            encode_bin = codec_util.get_encode_bin_description(media, codec, encode_element);
        }
        debug("using %s to encode %s", encode_element, codec);

        // Probe decoders by running encode ! decode end to end.
        string decode_bin = codec_util.get_decode_bin_description(media, codec, decode_element);
        while (!(yield pipeline_works(media, @"$encode_bin ! $decode_bin"))) {
            debug("%s not suited for decoding %s", decode_element, codec);
            codec_util.mark_element_unsupported(decode_element);
            decode_element = codec_util.get_decode_element_name(media, codec);
            if (decode_element == null) {
                debug("No suitable decoder found for %s", codec);
                unsupported_codecs.add(codec);
                return false;
            }
            decode_bin = codec_util.get_decode_bin_description(media, codec, decode_element);
        }
        debug("using %s to decode %s", decode_element, codec);

        supported_codecs.add(codec);
        return true;
    }

    // Append payload_type to list iff it passes the supports() probe.
    public async void add_if_supported(Gee.List<JingleRtp.PayloadType> list, string media, JingleRtp.PayloadType payload_type) {
        if (yield supports(media, payload_type)) {
            list.add(payload_type);
        }
    }

    // Advertised payload types, in preference order, filtered by what this
    // machine can actually encode/decode. Ids 96+ are dynamic payload types;
    // 0 (PCMU) and 8 (PCMA) are the static fallbacks.
    public override async Gee.List<JingleRtp.PayloadType> get_supported_payloads(string media) {
        Gee.List<JingleRtp.PayloadType> list = new ArrayList<JingleRtp.PayloadType>(JingleRtp.PayloadType.equals_func);
        if (media == "audio") {
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 2,
                clockrate = 48000,
                name = "opus",
                id = 96
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 1,
                clockrate = 32000,
                name = "speex",
                id = 97
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 1,
                clockrate = 16000,
                name = "speex",
                id = 98
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 1,
                clockrate = 8000,
                name = "speex",
                id = 99
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 1,
                clockrate = 8000,
                name = "PCMU",
                id = 0
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                channels = 1,
                clockrate = 8000,
                name = "PCMA",
                id = 8
            });
        } else if (media == "video") {
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                clockrate = 90000,
                name = "H264",
                id = 96
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                clockrate = 90000,
                name = "VP9",
                id = 97
            });
            yield add_if_supported(list, media, new JingleRtp.PayloadType() {
                clockrate = 90000,
                name = "VP8",
                id = 98
            });
        } else {
            warning("Unsupported media type: %s", media);
        }
        return list;
    }

    // Pick the first payload type from the peer's list that we support
    // (the list is assumed to be in the peer's preference order).
    public override async JingleRtp.PayloadType? pick_payload_type(string media, Gee.List<JingleRtp.PayloadType> payloads) {
        if (media == "audio") {
            foreach (JingleRtp.PayloadType type in payloads) {
                if (yield supports(media, type)) return type;
            }
        } else if (media == "video") {
            foreach (JingleRtp.PayloadType type in payloads) {
                if (yield supports(media, type)) return type;
            }
        } else {
            warning("Unsupported media type: %s", media);
        }
        return null;
    }

    // Stream lifecycle is owned by the plugin.
    public override JingleRtp.Stream create_stream(Jingle.Content content) {
        return plugin.open_stream(content);
    }

    public override void close_stream(JingleRtp.Stream stream) {
        var rtp_stream = stream as Rtp.Stream;
        plugin.close_stream(rtp_stream);
    }

    // Legacy feed-based API kept for reference while the stream API settles.
    // public uint32 get_session_id(string id) {
    //     return (uint32) id.split("-")[0].to_int();
    // }
    //
    // public string create_feed(string media, bool incoming) {
    //     init();
    //     string id = random_uuid();
    //     if (media == "audio") {
    //         id = "0-" + id;
    //     } else {
    //         id = "1-" + id;
    //     }
    //     MediaDevice? device = plugin.get_preferred_device(media, incoming);
    //     Feed feed;
    //     if (incoming) {
    //         if (media == "audio") {
    //             feed = new IncomingAudioFeed(id, this, device);
    //         } else if (media == "video") {
    //             feed = new IncomingVideoFeed(id, this, device);
    //         } else {
    //             critical("Incoming feed of media '%s' not supported", media);
    //             return id;
    //         }
    //     } else {
    //         if (media == "audio") {
    //             string? matching_incoming_feed_id = null;
    //             foreach (Feed match in plugin.feeds.values) {
    //                 if (match is IncomingAudioFeed) {
    //                     matching_incoming_feed_id = match.id;
    //                 }
    //             }
    //             feed = new OutgoingAudioFeed(id, this, device);
    //         } else if (media == "video") {
    //             feed = new OutgoingVideoFeed(id, this, device);
    //         } else {
    //             critical("Outgoing feed of media '%s' not supported", media);
    //             return id;
    //         }
    //     }
    //     plugin.add_feed(id, feed);
    //     return id;
    // }
    //
    // public void connect_feed(string id, JingleRtp.PayloadType payload, Jingle.DatagramConnection connection) {
    //     if (!plugin.feeds.has_key(id)) {
    //         critical("Tried to connect feed with id %s, but no such feed found", id);
    //         return;
    //     }
    //     Feed feed = plugin.feeds[id];
    //     feed.connect(payload, connection);
    // }
    //
    // public void destroy_feed(string id) {
    //     if (!plugin.feeds.has_key(id)) {
    //         critical("Tried to destroy feed with id %s, but no such feed found", id);
    //         return;
    //     }
    //     Feed feed = plugin.feeds[id];
    //     feed.destroy();
    //     plugin.feeds.remove(id);
    // }
}
|
39
plugins/rtp/src/participant.vala
Normal file
39
plugins/rtp/src/participant.vala
Normal file
|
@ -0,0 +1,39 @@
|
|||
using Gee;
|
||||
using Xmpp;
|
||||
|
||||
// A remote call participant, tracking which RTP SSRC it uses on each stream.
public class Dino.Plugins.Rtp.Participant {
    public Jid full_jid { get; private set; }

    protected Gst.Pipeline pipe;
    private Map<Stream, uint32> ssrcs = new HashMap<Stream, uint32>();

    public Participant(Gst.Pipeline pipe, Jid full_jid) {
        this.pipe = pipe;
        this.full_jid = full_jid;
    }

    // The SSRC learned for this participant on the given stream, 0 if unknown.
    public uint32 get_ssrc(Stream stream) {
        return ssrcs.has_key(stream) ? ssrcs[stream] : 0;
    }

    // Record the participant's SSRC on a stream. On first sight of a stream we
    // subscribe to its destruction so the mapping is dropped automatically;
    // relearning an already-known SSRC is unexpected and logged.
    public void set_ssrc(Stream stream, uint32 ssrc) {
        bool known = ssrcs.has_key(stream);
        if (known) {
            warning("Learning ssrc %ul for %s in %s when it is already known as %ul", ssrc, full_jid.to_string(), stream.to_string(), ssrcs[stream]);
        } else {
            stream.on_destroy.connect(unset_ssrc);
        }
        ssrcs[stream] = ssrc;
    }

    // Forget the SSRC for a stream and stop watching its destruction.
    public void unset_ssrc(Stream stream) {
        ssrcs.unset(stream);
        stream.on_destroy.disconnect(unset_ssrc);
    }

    public string to_string() {
        return @"participant $full_jid";
    }
}
|
413
plugins/rtp/src/plugin.vala
Normal file
413
plugins/rtp/src/plugin.vala
Normal file
|
@ -0,0 +1,413 @@
|
|||
using Gee;
|
||||
using Xmpp;
|
||||
using Xmpp.Xep;
|
||||
|
||||
/**
 * RTP plugin root: owns the single GStreamer pipeline shared by all calls,
 * the shared rtpbin, the device monitor and the list of active streams.
 * Registered with Dino's plugin registry as the video call backend.
 */
public class Dino.Plugins.Rtp.Plugin : RootInterface, VideoCallPlugin, Object {
    public Dino.Application app { get; private set; }
    public CodecUtil codec_util { get; private set; }
    public Gst.DeviceMonitor device_monitor { get; private set; }
    // Global pipeline all streams/widgets/devices are added to; null if RTP is unsupported.
    public Gst.Pipeline pipe { get; private set; }
    // Shared rtpbin element; each stream claims its own session id (rtpid) on it.
    public Gst.Bin rtpbin { get; private set; }
    // Echo probe shared by audio devices for echo cancellation.
    public Gst.Element echoprobe { get; private set; }

    private Gee.List<Stream> streams = new ArrayList<Stream>();
    private Gee.List<Device> devices = new ArrayList<Device>();
    // private Gee.List<Participant> participants = new ArrayList<Participant>();

    /**
     * Called by Dino when the plugin is loaded: wires GStreamer option parsing,
     * per-account Jingle RTP modules and registers this plugin as the call backend.
     */
    public void registered(Dino.Application app) {
        this.app = app;
        this.codec_util = new CodecUtil();
        app.startup.connect(startup);
        app.add_option_group(Gst.init_get_option_group());
        app.stream_interactor.module_manager.initialize_account_modules.connect((account, list) => {
            list.add(new Module(this));
        });
        app.plugin_registry.video_call_plugin = this;
    }

    // Nesting counter for pause()/unpause(): the pipeline is only set back to
    // PLAYING once every pause() has been matched by an unpause().
    private int pause_count = 0;

    /** Begin a pipeline modification section. Must be paired with unpause(). */
    public void pause() {
        // if (pause_count == 0) {
        //     debug("Pausing pipe for modifications");
        //     pipe.set_state(Gst.State.PAUSED);
        // }
        pause_count++;
    }

    /** End a pipeline modification section; resumes playback at the outermost unpause. */
    public void unpause() {
        pause_count--;
        if (pause_count == 0) {
            debug("Continue pipe after modifications");
            pipe.set_state(Gst.State.PLAYING);
        }
        if (pause_count < 0) warning("Pause count below zero!");
    }

    /**
     * Application startup: start device discovery and build the always-running
     * pipeline (rtpbin + echo probe). Leaves pipe null if rtpbin is unavailable.
     */
    public void startup() {
        device_monitor = new Gst.DeviceMonitor();
        device_monitor.show_all = true;
        device_monitor.get_bus().add_watch(Priority.DEFAULT, on_device_monitor_message);
        device_monitor.start();

        pipe = new Gst.Pipeline(null);

        // RTP
        rtpbin = Gst.ElementFactory.make("rtpbin", null) as Gst.Bin;
        if (rtpbin == null) {
            warning("RTP not supported");
            pipe = null;
            return;
        }
        rtpbin.pad_added.connect(on_rtp_pad_added);
        rtpbin.@set("latency", 100);
        rtpbin.connect("signal::request-pt-map", request_pt_map, this);
        pipe.add(rtpbin);

        // Audio echo probe
        echoprobe = Gst.ElementFactory.make("webrtcechoprobe", "echo-probe");
        pipe.add(echoprobe);

        // Pipeline
        pipe.auto_flush_bus = true;
        pipe.bus.add_watch(GLib.Priority.DEFAULT, (_, message) => {
            on_pipe_bus_message(message);
            return true;
        });
        pipe.set_state(Gst.State.PLAYING);
    }

    // rtpbin "request-pt-map" handler. Returning null means caps come from the
    // appsrc caps set per stream instead of a dynamic payload-type map.
    private static Gst.Caps? request_pt_map(Gst.Element rtpbin, uint session, uint pt, Plugin plugin) {
        debug("request-pt-map");
        return null;
    }

    // Dispatch dynamically created rtpbin pads to the stream owning that session.
    // Pad names follow rtpbin's scheme, e.g. recv_rtp_src_<session>_<ssrc>_<pt>.
    private void on_rtp_pad_added(Gst.Pad pad) {
        debug("pad added: %s", pad.name);
        if (pad.name.has_prefix("recv_rtp_src_")) {
            string[] split = pad.name.split("_");
            uint8 rtpid = (uint8)int.parse(split[3]);
            foreach (Stream stream in streams) {
                if (stream.rtpid == rtpid) {
                    stream.on_ssrc_pad_added(split[4], pad);
                }
            }
        }
        if (pad.name.has_prefix("send_rtp_src_")) {
            string[] split = pad.name.split("_");
            uint8 rtpid = (uint8)int.parse(split[3]);
            debug("pad %s for stream %hhu", pad.name, rtpid);
            foreach (Stream stream in streams) {
                if (stream.rtpid == rtpid) {
                    stream.on_send_rtp_src_added(pad);
                }
            }
        }
    }

    // Log pipeline bus traffic and recover from lost clocks.
    private void on_pipe_bus_message(Gst.Message message) {
        switch (message.type) {
            case Gst.MessageType.ERROR:
                Error error;
                string str;
                message.parse_error(out error, out str);
                warning("Error in pipeline: %s", error.message);
                debug(str);
                break;
            case Gst.MessageType.WARNING:
                Error error;
                string str;
                message.parse_warning(out error, out str);
                warning("Warning in pipeline: %s", error.message);
                debug(str);
                break;
            case Gst.MessageType.CLOCK_LOST:
                // Standard GStreamer recovery: cycle through READY to elect a new clock.
                debug("Clock lost. Restarting");
                pipe.set_state(Gst.State.READY);
                pipe.set_state(Gst.State.PLAYING);
                break;
            case Gst.MessageType.STATE_CHANGED:
                Gst.State new_state;
                message.parse_state_changed(null, out new_state, null);
                if (message.src is Gst.Element) {
                    debug("%s changed state to %s", ((Gst.Element)message.src).name, new_state.to_string());
                }
                break;
            case Gst.MessageType.STREAM_STATUS:
                Gst.StreamStatusType status;
                Gst.Element owner;
                message.parse_stream_status(out status, out owner);
                if (owner != null) {
                    debug("%s stream changed status to %s", owner.name, status.to_string());
                }
                break;
            case Gst.MessageType.ELEMENT:
                unowned Gst.Structure struc = message.get_structure();
                if (struc != null && message.src is Gst.Element) {
                    debug("Message from %s in pipeline: %s", ((Gst.Element)message.src).name, struc.to_string());
                }
                break;
            case Gst.MessageType.NEW_CLOCK:
                debug("New clock.");
                break;
            case Gst.MessageType.TAG:
                // Ignore
                break;
            case Gst.MessageType.QOS:
                // Ignore
                break;
            default:
                debug("Pipe bus message: %s", message.type.to_string());
                break;
        }
    }

    // Track hardware devices coming and going. Pipewire-provided audio devices and
    // monitor sources are skipped (duplicates of the pulse devices / loopbacks).
    private bool on_device_monitor_message(Gst.Bus bus, Gst.Message message) {
        Gst.Device old_device = null;
        Gst.Device device = null;
        switch (message.type) {
            case Gst.MessageType.DEVICE_ADDED:
                message.parse_device_added(out device);
                if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
                if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
                devices.add(new Device(this, device));
                break;
            case Gst.MessageType.DEVICE_CHANGED:
                message.parse_device_changed(out device, out old_device);
                if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
                if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
                // The changed device may never have been tracked (e.g. filtered at add time).
                Device? known_device = devices.first_match((it) => it.matches(old_device));
                if (known_device != null) known_device.update(device);
                break;
            case Gst.MessageType.DEVICE_REMOVED:
                message.parse_device_removed(out device);
                if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
                if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
                devices.remove(devices.first_match((it) => it.matches(device)));
                break;
        }
        if (device != null) {
            switch (device.device_class) {
                case "Audio/Source":
                    devices_changed("audio", false);
                    break;
                case "Audio/Sink":
                    devices_changed("audio", true);
                    break;
                case "Video/Source":
                    devices_changed("video", false);
                    break;
                case "Video/Sink":
                    devices_changed("video", true);
                    break;
            }
        }
        return Source.CONTINUE;
    }

    /** Return the lowest rtpbin session id not used by any active stream (max 100 streams). */
    public uint8 next_free_id() {
        uint8 rtpid = 0;
        while (streams.size < 100 && streams.any_match((stream) => stream.rtpid == rtpid)) {
            rtpid++;
        }
        return rtpid;
    }

    // public Participant get_participant(Jid full_jid, bool self) {
    //     foreach (Participant participant in participants) {
    //         if (participant.full_jid.equals(full_jid)) {
    //             return participant;
    //         }
    //     }
    //     Participant participant;
    //     if (self) {
    //         participant = new SelfParticipant(pipe, full_jid);
    //     } else {
    //         participant = new Participant(pipe, full_jid);
    //     }
    //     participants.add(participant);
    //     return participant;
    // }

    /**
     * Create a (Video)Stream for a Jingle content and start tracking it.
     * Returns null if the content carries no RTP parameters.
     */
    public Stream open_stream(Xmpp.Xep.Jingle.Content content) {
        var content_params = content.content_params as Xmpp.Xep.JingleRtp.Parameters;
        if (content_params == null) return null;
        Stream stream;
        if (content_params.media == "video") {
            stream = new VideoStream(this, content);
        } else {
            stream = new Stream(this, content);
        }
        streams.add(stream);
        return stream;
    }

    /** Stop tracking a stream and tear down its pipeline elements. */
    public void close_stream(Stream stream) {
        streams.remove(stream);
        stream.destroy();
    }

    /** Application shutdown: stop device discovery and tear down GStreamer. */
    public void shutdown() {
        device_monitor.stop();
        pipe.set_state(Gst.State.NULL);
        rtpbin = null;
        pipe = null;
        Gst.deinit();
    }

    /** Create a video display widget; only the GTK widget type is supported. */
    public VideoCallWidget? create_widget(WidgetType type) {
        if (type == WidgetType.GTK) {
            return new VideoWidget(this);
        }
        return null;
    }

    /**
     * List devices for a media type and direction, best first.
     * Audio devices are ordered preferring pipewire, then the pulse default.
     */
    public Gee.List<MediaDevice> get_devices(string media, bool incoming) {
        if (media == "video" && !incoming) {
            return get_video_sources();
        }

        ArrayList<MediaDevice> result = new ArrayList<MediaDevice>();
        foreach (Device device in devices) {
            if (device.media == media && (incoming && device.is_sink || !incoming && device.is_source)) {
                result.add(device);
            }
        }
        if (media == "audio") {
            // Reorder sources
            result.sort((media_left, media_right) => {
                Device left = media_left as Device;
                Device right = media_right as Device;
                if (left == null) return 1;
                if (right == null) return -1;

                bool left_is_pipewire = left.device.properties.has_name("pipewire-proplist");
                bool right_is_pipewire = right.device.properties.has_name("pipewire-proplist");

                bool left_is_default = false;
                left.device.properties.get_boolean("is-default", out left_is_default);
                bool right_is_default = false;
                right.device.properties.get_boolean("is-default", out right_is_default);

                // Prefer pipewire
                if (left_is_pipewire && !right_is_pipewire) return -1;
                if (right_is_pipewire && !left_is_pipewire) return 1;

                // Prefer pulse audio default device
                if (left_is_default && !right_is_default) return -1;
                if (right_is_default && !left_is_default) return 1;

                return 0;
            });
        }
        return result;
    }

    /**
     * List usable cameras, best (highest advertised FPS) first. Grey-scale-only
     * devices are dropped; if pipewire provides any camera, only pipewire devices
     * are offered to avoid v4l2 duplicates.
     */
    public Gee.List<MediaDevice> get_video_sources() {
        ArrayList<MediaDevice> pipewire_devices = new ArrayList<MediaDevice>();
        ArrayList<MediaDevice> other_devices = new ArrayList<MediaDevice>();

        foreach (Device device in devices) {
            if (device.media != "video") continue;
            if (device.is_sink) continue;

            bool is_color = false;
            for (int i = 0; i < device.device.caps.get_size(); i++) {
                unowned Gst.Structure structure = device.device.caps.get_structure(i);
                if (structure.has_field("format") && !structure.get_string("format").has_prefix("GRAY")) {
                    is_color = true;
                }
            }

            // Don't allow grey-scale devices
            if (!is_color) continue;

            if (device.device.properties.has_name("pipewire-proplist")) {
                pipewire_devices.add(device);
            } else {
                other_devices.add(device);
            }
        }

        // If we have any pipewire devices, present only those. Don't want duplicated devices from pipewire and video for linux.
        ArrayList<MediaDevice> devices = pipewire_devices.size > 0 ? pipewire_devices : other_devices;

        // Reorder sources
        devices.sort((media_left, media_right) => {
            Device left = media_left as Device;
            Device right = media_right as Device;
            if (left == null) return 1;
            if (right == null) return -1;

            int left_fps = 0;
            for (int i = 0; i < left.device.caps.get_size(); i++) {
                unowned Gst.Structure structure = left.device.caps.get_structure(i);
                int num = 0, den = 0;
                // Guard against a zero denominator before dividing.
                if (structure.has_field("framerate") && structure.get_fraction("framerate", out num, out den) && den > 0) left_fps = int.max(left_fps, num / den);
            }

            int right_fps = 0;
            // FIX: this loop previously iterated left's caps, so every comparison
            // measured left against itself and the FPS ordering was meaningless.
            for (int i = 0; i < right.device.caps.get_size(); i++) {
                unowned Gst.Structure structure = right.device.caps.get_structure(i);
                int num = 0, den = 0;
                if (structure.has_field("framerate") && structure.get_fraction("framerate", out num, out den) && den > 0) right_fps = int.max(right_fps, num / den);
            }

            // More FPS is better
            if (left_fps > right_fps) return -1;
            if (right_fps > left_fps) return 1;

            return 0;
        });

        return devices;
    }

    /** First (best) device for the given media/direction, or null with a warning. */
    public Device? get_preferred_device(string media, bool incoming) {
        foreach (MediaDevice media_device in get_devices(media, incoming)) {
            Device? device = media_device as Device;
            if (device != null) return device;
        }
        warning("No preferred device for %s %s. Media will not be processed.", incoming ? "incoming" : "outgoing", media);
        return null;
    }

    /** Device currently used by a stream, falling back to the preferred device. */
    public MediaDevice? get_device(Xmpp.Xep.JingleRtp.Stream stream, bool incoming) {
        Stream plugin_stream = stream as Stream;
        if (plugin_stream == null) return null;
        if (incoming) {
            return plugin_stream.output_device ?? get_preferred_device(stream.media, incoming);
        } else {
            return plugin_stream.input_device ?? get_preferred_device(stream.media, incoming);
        }
    }

    // Debug helper: dumps the pipeline graph. Only produces output when the
    // GST_DEBUG_DUMP_DOT_DIR environment variable is set.
    private void dump_dot() {
        string name = @"pipe-$(pipe.clock.get_time())-$(pipe.current_state)";
        Gst.Debug.bin_to_dot_file(pipe, Gst.DebugGraphDetails.ALL, name);
        debug("Stored pipe details as %s", name);
    }

    /** Pause or resume a stream; dumps pipeline state shortly after resuming (debug aid). */
    public void set_pause(Xmpp.Xep.JingleRtp.Stream stream, bool pause) {
        Stream plugin_stream = stream as Stream;
        if (plugin_stream == null) return;
        if (pause) {
            plugin_stream.pause();
        } else {
            plugin_stream.unpause();
            Timeout.add_seconds(3, () => {
                dump_dot();
                return false;
            });
        }
    }

    /** Route a stream to a specific device (source → input, sink → output). */
    public void set_device(Xmpp.Xep.JingleRtp.Stream stream, MediaDevice? device) {
        Device real_device = device as Device;
        Stream plugin_stream = stream as Stream;
        if (real_device == null || plugin_stream == null) return;
        if (real_device.is_source) {
            plugin_stream.input_device = real_device;
        } else if (real_device.is_sink) {
            plugin_stream.output_device = real_device;
        }
    }
}
|
3
plugins/rtp/src/register_plugin.vala
Normal file
3
plugins/rtp/src/register_plugin.vala
Normal file
|
@ -0,0 +1,3 @@
|
|||
// Entry point resolved by Dino's plugin loader via GModule: returns the GObject
// type of this plugin's RootInterface implementation so the loader can instantiate it.
public Type register_plugin(Module module) {
    return typeof (Dino.Plugins.Rtp.Plugin);
}
|
432
plugins/rtp/src/stream.vala
Normal file
432
plugins/rtp/src/stream.vala
Normal file
|
@ -0,0 +1,432 @@
|
|||
using Gee;
|
||||
using Xmpp;
|
||||
|
||||
/**
 * One RTP media stream (audio by default; see VideoStream for video) backing a
 * Jingle content. Bridges the XMPP transport and the shared GStreamer pipeline:
 * appsrc/appsink elements exchange RTP/RTCP packets with the network layer,
 * encode/decode bins convert between raw media and the negotiated payload type,
 * and request pads on the shared rtpbin tie it all to RTP session `rtpid`.
 */
public class Dino.Plugins.Rtp.Stream : Xmpp.Xep.JingleRtp.Stream {
    // rtpbin session id claimed from Plugin.next_free_id(); used in all pad/element names.
    public uint8 rtpid { get; private set; }

    public Plugin plugin { get; private set; }
    // Convenience accessors forwarding to the plugin-owned pipeline objects.
    public Gst.Pipeline pipe { get {
        return plugin.pipe;
    }}
    public Gst.Element rtpbin { get {
        return plugin.rtpbin;
    }}
    public CodecUtil codec_util { get {
        return plugin.codec_util;
    }}
    // appsink receiving encoded RTP/RTCP from the pipeline for sending over the network.
    private Gst.App.Sink send_rtp;
    private Gst.App.Sink send_rtcp;
    // appsrc feeding RTP/RTCP received from the network into the pipeline.
    private Gst.App.Src recv_rtp;
    private Gst.App.Src recv_rtcp;
    // Payload encode/decode bins built by CodecUtil for the negotiated payload type.
    private Gst.Element encode;
    private Gst.Element decode;
    // Raw media endpoints currently linked into the stream (may be null).
    private Gst.Element input;
    private Gst.Element output;

    private Device _input_device;
    // Capture device. Setting it relinks the capture source unless the stream is
    // paused (while paused only the backing field is updated; unpause() relinks).
    public Device input_device { get { return _input_device; } set {
        if (!paused) {
            if (this._input_device != null) {
                this._input_device.unlink();
                this._input_device = null;
            }
            set_input(value != null ? value.link_source() : null);
        }
        this._input_device = value;
    }}
    private Device _output_device;
    // Playback device. Setting it swaps the current output sink for the new one.
    public Device output_device { get { return _output_device; } set {
        if (output != null) remove_output(output);
        if (value != null) add_output(value.link_sink());
        this._output_device = value;
    }}

    // True once create() has built and linked all pipeline elements.
    public bool created { get; private set; default = false; }
    // True while sending is suspended (input detached from the encoder).
    public bool paused { get; private set; default = false; }
    // Gate for forwarding received packets into the appsrcs; cleared in destroy().
    private bool push_recv_data = false;
    // SSRC of the remote participant, learned from the first recv_rtp_src pad.
    private string participant_ssrc = null;

    // Request/dynamic pads obtained from rtpbin; released again in destroy().
    private Gst.Pad recv_rtcp_sink_pad;
    private Gst.Pad recv_rtp_sink_pad;
    private Gst.Pad recv_rtp_src_pad;
    private Gst.Pad send_rtcp_src_pad;
    private Gst.Pad send_rtp_sink_pad;
    private Gst.Pad send_rtp_src_pad;

    public Stream(Plugin plugin, Xmpp.Xep.Jingle.Content content) {
        base(content);
        this.plugin = plugin;
        this.rtpid = plugin.next_free_id();

        // React when the negotiated senders (sending/receiving roles) change.
        content.notify["senders"].connect_after(on_senders_changed);
    }

    // Late-attach devices when we become a sender/receiver after creation.
    public void on_senders_changed() {
        if (sending && input == null) {
            input_device = plugin.get_preferred_device(media, false);
        }
        if (receiving && output == null) {
            output_device = plugin.get_preferred_device(media, true);
        }
    }

    /**
     * Build and link all GStreamer elements for this stream. Order matters:
     * app elements first, then RTCP pads, then the send path (encode), then the
     * receive path (decode), with the whole pipeline held paused throughout.
     */
    public override void create() {
        plugin.pause();

        // Create i/o if needed

        if (input == null && input_device == null && sending) {
            input_device = plugin.get_preferred_device(media, false);
        }
        // NOTE(review): video output is attached later via add_output() from the
        // widget side, hence the media == "audio" restriction here — confirm.
        if (output == null && output_device == null && receiving && media == "audio") {
            output_device = plugin.get_preferred_device(media, true);
        }

        // Create app elements
        send_rtp = Gst.ElementFactory.make("appsink", @"rtp-sink-$rtpid") as Gst.App.Sink;
        send_rtp.async = false;
        send_rtp.caps = CodecUtil.get_caps(media, payload_type);
        send_rtp.emit_signals = true;
        send_rtp.sync = false;
        send_rtp.new_sample.connect(on_new_sample);
        pipe.add(send_rtp);

        send_rtcp = Gst.ElementFactory.make("appsink", @"rtcp-sink-$rtpid") as Gst.App.Sink;
        send_rtcp.async = false;
        send_rtcp.caps = new Gst.Caps.empty_simple("application/x-rtcp");
        send_rtcp.emit_signals = true;
        send_rtcp.sync = false;
        send_rtcp.new_sample.connect(on_new_sample);
        pipe.add(send_rtcp);

        recv_rtp = Gst.ElementFactory.make("appsrc", @"rtp-src-$rtpid") as Gst.App.Src;
        recv_rtp.caps = CodecUtil.get_caps(media, payload_type);
        recv_rtp.do_timestamp = true;
        recv_rtp.format = Gst.Format.TIME;
        recv_rtp.is_live = true;
        pipe.add(recv_rtp);

        recv_rtcp = Gst.ElementFactory.make("appsrc", @"rtcp-src-$rtpid") as Gst.App.Src;
        recv_rtcp.caps = new Gst.Caps.empty_simple("application/x-rtcp");
        recv_rtcp.do_timestamp = true;
        recv_rtcp.format = Gst.Format.TIME;
        recv_rtcp.is_live = true;
        pipe.add(recv_rtcp);

        // Connect RTCP
        send_rtcp_src_pad = rtpbin.get_request_pad(@"send_rtcp_src_$rtpid");
        send_rtcp_src_pad.link(send_rtcp.get_static_pad("sink"));
        recv_rtcp_sink_pad = rtpbin.get_request_pad(@"recv_rtcp_sink_$rtpid");
        recv_rtcp.get_static_pad("src").link(recv_rtcp_sink_pad);

        // Connect input
        encode = codec_util.get_encode_bin(media, payload_type, @"encode-$rtpid");
        pipe.add(encode);
        send_rtp_sink_pad = rtpbin.get_request_pad(@"send_rtp_sink_$rtpid");
        encode.get_static_pad("src").link(send_rtp_sink_pad);
        if (input != null) {
            input.link(encode);
        }

        // Connect output
        decode = codec_util.get_decode_bin(media, payload_type, @"decode-$rtpid");
        pipe.add(decode);
        if (output != null) {
            decode.link(output);
        }

        // Connect RTP
        recv_rtp_sink_pad = rtpbin.get_request_pad(@"recv_rtp_sink_$rtpid");
        recv_rtp.get_static_pad("src").link(recv_rtp_sink_pad);

        created = true;
        push_recv_data = true;
        plugin.unpause();
    }

    // appsink callback: pull an encoded RTP/RTCP packet from the pipeline and
    // hand its bytes to the Jingle transport via on_send_rtp_data/on_send_rtcp_data.
    private Gst.FlowReturn on_new_sample(Gst.App.Sink sink) {
        if (sink == null) {
            debug("Sink is null");
            return Gst.FlowReturn.EOS;
        }
        Gst.Sample sample = sink.pull_sample();
        Gst.Buffer buffer = sample.get_buffer();
        uint8[] data;
        buffer.extract_dup(0, buffer.get_size(), out data);
        if (sink == send_rtp) {
            on_send_rtp_data(new Bytes.take(data));
        } else if (sink == send_rtcp) {
            on_send_rtcp_data(new Bytes.take(data));
        } else {
            warning("unknown sample");
        }
        return Gst.FlowReturn.OK;
    }

    // Pad probe that discards all data; used to block pads during teardown.
    private static Gst.PadProbeReturn drop_probe() {
        return Gst.PadProbeReturn.DROP;
    }

    /**
     * Tear everything down in the reverse order of create(): stop packet flow,
     * detach devices, block and unlink each path, set elements to NULL state,
     * remove them from the pipeline and finally release the rtpbin request pads.
     * The exact sequencing is load-bearing — do not reorder casually.
     */
    public override void destroy() {
        // Stop network communication
        push_recv_data = false;
        recv_rtp.end_of_stream();
        recv_rtcp.end_of_stream();
        send_rtp.new_sample.disconnect(on_new_sample);
        send_rtcp.new_sample.disconnect(on_new_sample);

        // Disconnect input device
        if (input != null) {
            input.unlink(encode);
            input = null;
        }
        if (this._input_device != null) {
            if (!paused) this._input_device.unlink();
            this._input_device = null;
        }

        // Disconnect encode
        encode.set_locked_state(true);
        encode.set_state(Gst.State.NULL);
        encode.get_static_pad("src").unlink(send_rtp_sink_pad);
        pipe.remove(encode);
        encode = null;

        // Disconnect RTP sending
        if (send_rtp_src_pad != null) {
            send_rtp_src_pad.add_probe(Gst.PadProbeType.BLOCK, drop_probe);
            send_rtp_src_pad.unlink(send_rtp.get_static_pad("sink"));
        }
        send_rtp.set_locked_state(true);
        send_rtp.set_state(Gst.State.NULL);
        pipe.remove(send_rtp);
        send_rtp = null;

        // Disconnect decode
        if (recv_rtp_src_pad != null) {
            recv_rtp_src_pad.add_probe(Gst.PadProbeType.BLOCK, drop_probe);
            recv_rtp_src_pad.unlink(decode.get_static_pad("sink"));
        }

        // Disconnect RTP receiving
        recv_rtp.set_locked_state(true);
        recv_rtp.set_state(Gst.State.NULL);
        recv_rtp.get_static_pad("src").unlink(recv_rtp_sink_pad);
        pipe.remove(recv_rtp);
        recv_rtp = null;

        // Disconnect output
        if (output != null) {
            decode.unlink(output);
        }
        decode.set_locked_state(true);
        decode.set_state(Gst.State.NULL);
        pipe.remove(decode);
        decode = null;
        output = null;

        // Disconnect output device
        if (this._output_device != null) {
            this._output_device.unlink();
            this._output_device = null;
        }

        // Disconnect RTCP receiving
        recv_rtcp.get_static_pad("src").unlink(recv_rtcp_sink_pad);
        recv_rtcp.set_locked_state(true);
        recv_rtcp.set_state(Gst.State.NULL);
        pipe.remove(recv_rtcp);
        recv_rtcp = null;

        // Disconnect RTCP sending
        send_rtcp_src_pad.unlink(send_rtcp.get_static_pad("sink"));
        send_rtcp.set_locked_state(true);
        send_rtcp.set_state(Gst.State.NULL);
        pipe.remove(send_rtcp);
        send_rtcp = null;

        // Release rtp pads
        rtpbin.release_request_pad(send_rtp_sink_pad);
        send_rtp_sink_pad = null;
        rtpbin.release_request_pad(recv_rtp_sink_pad);
        recv_rtp_sink_pad = null;
        rtpbin.release_request_pad(recv_rtcp_sink_pad);
        recv_rtcp_sink_pad = null;
        rtpbin.release_request_pad(send_rtcp_src_pad);
        send_rtcp_src_pad = null;
        send_rtp_src_pad = null;
        recv_rtp_src_pad = null;
    }

    // Network → pipeline: forward a received RTP packet, unless shutting down.
    public override void on_recv_rtp_data(Bytes bytes) {
        if (push_recv_data) {
            recv_rtp.push_buffer(new Gst.Buffer.wrapped_bytes(bytes));
        }
    }

    // Network → pipeline: forward a received RTCP packet, unless shutting down.
    public override void on_recv_rtcp_data(Bytes bytes) {
        if (push_recv_data) {
            recv_rtcp.push_buffer(new Gst.Buffer.wrapped_bytes(bytes));
        }
    }

    public override void on_rtp_ready() {
        // If full frame has been sent before the connection was ready, the counterpart would only display our video after the next full frame.
        // Send a full frame to let the counterpart display our video asap
        rtpbin.send_event(new Gst.Event.custom(
                Gst.EventType.CUSTOM_UPSTREAM,
                new Gst.Structure("GstForceKeyUnit", "all-headers", typeof(bool), true, null))
        );
    }

    // Ask our rtpbin session to (re)send RTCP now that the transport is up.
    public override void on_rtcp_ready() {
        int rtp_session_id = (int) rtpid;
        uint64 max_delay = int.MAX;
        Object rtp_session;
        bool rtp_sent;
        GLib.Signal.emit_by_name(rtpbin, "get-internal-session", rtp_session_id, out rtp_session);
        GLib.Signal.emit_by_name(rtp_session, "send-rtcp-full", max_delay, out rtp_sent);
        debug("RTCP is ready, resending rtcp: %s", rtp_sent.to_string());
    }

    // rtpbin produced the dynamic receive pad for the remote SSRC: link it to the decoder.
    public void on_ssrc_pad_added(string ssrc, Gst.Pad pad) {
        participant_ssrc = ssrc;
        recv_rtp_src_pad = pad;
        if (decode != null) {
            plugin.pause();
            debug("Link %s to %s decode for %s", recv_rtp_src_pad.name, media, name);
            recv_rtp_src_pad.link(decode.get_static_pad("sink"));
            plugin.unpause();
        }
    }

    // rtpbin produced the send source pad: link it to our sending appsink.
    public void on_send_rtp_src_added(Gst.Pad pad) {
        send_rtp_src_pad = pad;
        if (send_rtp != null) {
            plugin.pause();
            debug("Link %s to %s send_rtp for %s", send_rtp_src_pad.name, media, name);
            send_rtp_src_pad.link(send_rtp.get_static_pad("sink"));
            plugin.unpause();
        }
    }

    /** Replace the raw media input element, keeping the current paused state. */
    public void set_input(Gst.Element? input) {
        set_input_and_pause(input, paused);
    }

    // Swap the input element and update the paused flag; only links the new
    // input to the encoder when created, sending and not paused.
    private void set_input_and_pause(Gst.Element? input, bool paused) {
        if (created && this.input != null) {
            this.input.unlink(encode);
            this.input = null;
        }

        this.input = input;
        this.paused = paused;

        if (created && sending && !paused && input != null) {
            plugin.pause();
            input.link(encode);
            plugin.unpause();
        }
    }

    /** Suspend sending: detach the input and release the capture device. */
    public void pause() {
        if (paused) return;
        set_input_and_pause(null, true);
        if (input_device != null) input_device.unlink();
    }

    /** Resume sending: re-acquire the capture device and relink it. */
    public void unpause() {
        if (!paused) return;
        set_input_and_pause(input_device != null ? input_device.link_source() : null, false);
    }

    // Probe id blocking the decoder's src pad while no output is attached.
    ulong block_probe_handler_id = 0;

    /** Attach a raw media sink (speaker, video widget) downstream of the decoder. */
    public virtual void add_output(Gst.Element element) {
        if (output != null) {
            critical("add_output() invoked more than once");
            return;
        }
        this.output = element;
        if (created) {
            plugin.pause();
            decode.link(element);
            if (block_probe_handler_id != 0) {
                decode.get_static_pad("src").remove_probe(block_probe_handler_id);
            }
            plugin.unpause();
        }
    }

    /** Detach the current output sink and block the decoder until a new one is added. */
    public virtual void remove_output(Gst.Element element) {
        if (output != element) {
            critical("remove_output() invoked without prior add_output()");
            return;
        }
        if (created) {
            block_probe_handler_id = decode.get_static_pad("src").add_probe(Gst.PadProbeType.BLOCK, drop_probe);
            decode.unlink(element);
        }
        if (this._output_device != null) {
            this._output_device.unlink();
            this._output_device = null;
        }
        this.output = null;
    }
}
|
||||
|
||||
/**
 * Video specialization of Stream: inserts a tee behind the decoder so several
 * consumers (e.g. multiple video widgets) can watch the same incoming video.
 * The tee itself is the single output registered with the base class; the
 * per-consumer sinks hang off the tee.
 */
public class Dino.Plugins.Rtp.VideoStream : Stream {
    // Consumers currently fed by the tee (or queued for linking until create()).
    private Gee.List<Gst.Element> outputs = new ArrayList<Gst.Element>();
    private Gst.Element output_tee;

    public VideoStream(Plugin plugin, Xmpp.Xep.Jingle.Content content) {
        base(plugin, content);
        if (media != "video") critical("VideoStream created for non-video media");
    }

    /** Build the tee, register it as the base class output, then link queued consumers. */
    public override void create() {
        plugin.pause();
        output_tee = Gst.ElementFactory.make("tee", null);
        // Keep the tee flowing even while no consumer is attached.
        output_tee.@set("allow-not-linked", true);
        pipe.add(output_tee);
        add_output(output_tee);
        base.create();
        foreach (Gst.Element consumer in outputs) output_tee.link(consumer);
        plugin.unpause();
    }

    /** Unlink all consumers, tear down the base stream, then remove the tee. */
    public override void destroy() {
        foreach (Gst.Element consumer in outputs) output_tee.unlink(consumer);
        base.destroy();
        output_tee.set_locked_state(true);
        output_tee.set_state(Gst.State.NULL);
        pipe.remove(output_tee);
        output_tee = null;
    }

    /** Register a consumer. The tee itself is forwarded to the base class. */
    public override void add_output(Gst.Element element) {
        if (element == output_tee) {
            base.add_output(element);
            return;
        }
        outputs.add(element);
        if (output_tee != null) output_tee.link(element);
    }

    /** Unregister a consumer. The tee itself is forwarded to the base class. */
    public override void remove_output(Gst.Element element) {
        if (element == output_tee) {
            base.remove_output(element);
            return;
        }
        outputs.remove(element);
        if (output_tee != null) output_tee.unlink(element);
    }
}
|
110
plugins/rtp/src/video_widget.vala
Normal file
110
plugins/rtp/src/video_widget.vala
Normal file
|
@ -0,0 +1,110 @@
|
|||
/**
 * GTK widget displaying video from either a remote stream or a local camera.
 * Wraps a gtksink element; the conversion chain built on attach is
 * source/tee → convert (videoconvert, plus a mirror flip for local preview) → gtksink.
 */
public class Dino.Plugins.Rtp.VideoWidget : Gtk.Bin, Dino.Plugins.VideoCallWidget {
    // Monotonic counter to give each widget's GStreamer elements unique names.
    private static uint last_id = 0;

    public uint id { get; private set; }
    // The gtksink element; null if gtksink is unavailable (then nothing is displayed).
    public Gst.Element element { get; private set; }
    public Gtk.Widget widget { get; private set; }

    public Plugin plugin { get; private set; }
    public Gst.Pipeline pipe { get {
        return plugin.pipe;
    }}

    // True while element+convert are linked into the pipeline.
    private bool attached;
    // Exactly one of these is non-null while attached.
    private Device? connected_device;
    private Stream? connected_stream;
    // Conversion bin between the video source and the gtksink.
    private Gst.Element convert;

    public VideoWidget(Plugin plugin) {
        this.plugin = plugin;

        id = last_id++;
        element = Gst.ElementFactory.make("gtksink", @"video-widget-$id");
        if (element != null) {
            Gtk.Widget widget;
            element.@get("widget", out widget);
            element.@set("async", false);
            element.@set("sync", false);
            this.widget = widget;
            add(widget);
            widget.visible = true;

            // Listen for resolution changes
            element.get_static_pad("sink").notify["caps"].connect(() => {
                if (element.get_static_pad("sink").caps == null) return;

                int width, height;
                element.get_static_pad("sink").caps.get_structure(0).get_int("width", out width);
                element.get_static_pad("sink").caps.get_structure(0).get_int("height", out height);
                resolution_changed(width, height);
            });
        } else {
            warning("Could not create GTK video sink. Won't display videos.");
        }
    }

    /** Show the remote video of a call stream in this widget. */
    public void display_stream(Xmpp.Xep.JingleRtp.Stream stream) {
        if (element == null) return;
        detach();
        if (stream.media != "video") return;
        connected_stream = stream as Stream;
        if (connected_stream == null) return;
        plugin.pause();
        pipe.add(element);
        convert = Gst.parse_bin_from_description(@"videoconvert name=video-widget-$id-convert", true);
        convert.name = @"video-widget-$id-prepare";
        pipe.add(convert);
        convert.link(element);
        connected_stream.add_output(convert);
        element.set_locked_state(false);
        plugin.unpause();
        attached = true;
    }

    /** Show a local camera preview (mirrored) in this widget. */
    public void display_device(MediaDevice media_device) {
        if (element == null) return;
        detach();
        connected_device = media_device as Device;
        if (connected_device == null) return;
        plugin.pause();
        pipe.add(element);
        // horizontal-flip mirrors the preview, matching what users expect from a selfie view.
        convert = Gst.parse_bin_from_description(@"videoflip method=horizontal-flip name=video-widget-$id-flip ! videoconvert name=video-widget-$id-convert", true);
        convert.name = @"video-widget-$id-prepare";
        pipe.add(convert);
        convert.link(element);
        connected_device.link_source().link(convert);
        element.set_locked_state(false);
        plugin.unpause();
        attached = true;
    }

    /** Disconnect from the current stream/device and remove our elements from the pipeline. */
    public void detach() {
        if (element == null) return;
        if (attached) {
            if (connected_stream != null) {
                connected_stream.remove_output(convert);
                connected_stream = null;
            }
            if (connected_device != null) {
                // FIX: display_device() linked the device source to `convert`, not to
                // `element`, so the source must be unlinked from `convert` here —
                // unlinking from `element` was a no-op and left the source attached.
                connected_device.link_source().unlink(convert);
                connected_device.unlink(); // We get a new ref to recover the element, so unlink twice
                connected_device.unlink();
                connected_device = null;
            }
            convert.set_locked_state(true);
            convert.set_state(Gst.State.NULL);
            pipe.remove(convert);
            convert = null;
            element.set_locked_state(true);
            element.set_state(Gst.State.NULL);
            pipe.remove(element);
            attached = false;
        }
    }

    public override void dispose() {
        detach();
        widget = null;
        element = null;
    }
}
|
Loading…
Reference in a new issue