comparison libervia/frontends/tools/webrtc.py @ 4204:879bad48cc2d

frontends (tools/webrtc): implement X11 desktop sharing: rel 433
author Goffi <goffi@goffi.org>
date Tue, 16 Jan 2024 10:42:00 +0100
parents 60d107f2178a
children 17a8168966f9
comparison
equal deleted inserted replaced
4203:4af030d4d3d8 4204:879bad48cc2d
39 import re 39 import re
40 from typing import Callable 40 from typing import Callable
41 from urllib.parse import quote_plus 41 from urllib.parse import quote_plus
42 42
43 from libervia.backend.tools.common import data_format 43 from libervia.backend.tools.common import data_format
44 from libervia.frontends.tools import aio 44 from libervia.frontends.tools import aio, display_servers
45
46 current_server = display_servers.detect()
47 if current_server == display_servers.X11:
48 # GStreamer's ximagesrc documentation asks to run this function
49 import ctypes
50 ctypes.CDLL('libX11.so.6').XInitThreads()
45 51
46 52
47 log = logging.getLogger(__name__) 53 log = logging.getLogger(__name__)
48 54
49 Gst.init(None) 55 Gst.init(None)
81 self.bridge = bridge 87 self.bridge = bridge
82 self.profile = profile 88 self.profile = profile
83 self.pipeline = None 89 self.pipeline = None
84 self._audio_muted = False 90 self._audio_muted = False
85 self._video_muted = False 91 self._video_muted = False
92 self._desktop_sharing = False
93 self.desktop_sharing_data = None
86 self.sources = sources 94 self.sources = sources
87 self.sinks = sinks 95 self.sinks = sinks
88 if sinks == SINKS_APP: 96 if sinks == SINKS_APP:
89 self.appsink_data = appsink_data 97 self.appsink_data = appsink_data
90 elif appsink_data is not None: 98 elif appsink_data is not None:
111 @video_muted.setter 119 @video_muted.setter
112 def video_muted(self, muted: bool) -> None: 120 def video_muted(self, muted: bool) -> None:
113 if muted != self._video_muted: 121 if muted != self._video_muted:
114 self._video_muted = muted 122 self._video_muted = muted
115 self.on_video_mute(muted) 123 self.on_video_mute(muted)
124
125 @property
126 def desktop_sharing(self):
127 return self._desktop_sharing
128
129 @desktop_sharing.setter
130 def desktop_sharing(self, active: bool) -> None:
131 if active != self._desktop_sharing:
132 self._desktop_sharing = active
133 self.on_desktop_switch(active)
116 134
117 @property 135 @property
118 def sdp_set(self): 136 def sdp_set(self):
119 return self._sdp_set 137 return self._sdp_set
120 138
382 ! tee name=t 400 ! tee name=t
383 401
384 {extra_elt} 402 {extra_elt}
385 403
386 {video_source_elt} name=video_src ! queue leaky=downstream ! video_selector. 404 {video_source_elt} name=video_src ! queue leaky=downstream ! video_selector.
387 videotestsrc is-live=true pattern=black ! queue leaky=downstream ! video_selector. 405 videotestsrc name=muted_src is-live=true pattern=black ! queue leaky=downstream ! video_selector.
388 406
389 t. 407 t.
390 ! queue max-size-buffers=5 max-size-time=0 max-size-bytes=0 leaky=downstream 408 ! queue max-size-buffers=5 max-size-time=0 max-size-bytes=0 leaky=downstream
391 ! videoconvert 409 ! videoconvert
392 ! vp8enc deadline=1 keyframe-max-dist=60 410 ! vp8enc deadline=1 keyframe-max-dist=60
417 if not self.pipeline: 435 if not self.pipeline:
418 raise exceptions.InternalError("Failed to create Gstreamer pipeline.") 436 raise exceptions.InternalError("Failed to create Gstreamer pipeline.")
419 437
420 self.webrtcbin = self.pipeline.get_by_name("sendrecv") 438 self.webrtcbin = self.pipeline.get_by_name("sendrecv")
421 self.video_src = self.pipeline.get_by_name("video_src") 439 self.video_src = self.pipeline.get_by_name("video_src")
440 self.muted_src = self.pipeline.get_by_name("muted_src")
422 self.video_selector = self.pipeline.get_by_name("video_selector") 441 self.video_selector = self.pipeline.get_by_name("video_selector")
423 self.audio_valve = self.pipeline.get_by_name("audio_valve") 442 self.audio_valve = self.pipeline.get_by_name("audio_valve")
424 443
425 if self.video_muted: 444 if self.video_muted:
426 self.on_video_mute(True) 445 self.on_video_mute(True)
886 @param message: The eos message. 905 @param message: The eos message.
887 """ 906 """
888 log.info("End of stream") 907 log.info("End of stream")
889 908
890 def on_audio_mute(self, muted: bool) -> None: 909 def on_audio_mute(self, muted: bool) -> None:
910 """Handles (un)muting of audio.
911
912 @param muted: True if audio is muted.
913 """
891 if self.audio_valve is not None: 914 if self.audio_valve is not None:
892 self.audio_valve.set_property("drop", muted) 915 self.audio_valve.set_property("drop", muted)
893 state = "muted" if muted else "unmuted" 916 state = "muted" if muted else "unmuted"
894 log.info(f"audio is now {state}") 917 log.info(f"audio is now {state}")
895 918
896 def on_video_mute(self, muted: bool) -> None: 919 def on_video_mute(self, muted: bool) -> None:
920 """Handles (un)muting of video.
921
922 @param muted: True if video is muted.
923 """
897 if self.video_selector is not None: 924 if self.video_selector is not None:
898 # when muted, we switch to a black image and deactivate the camera 925 current_source = None if muted else "desktop" if self.desktop_sharing else "camera"
899 if not muted: 926 self.switch_video_source(current_source)
900 self.video_src.set_state(Gst.State.PLAYING)
901 pad = self.video_selector.get_static_pad("sink_1" if muted else "sink_0")
902 self.video_selector.props.active_pad = pad
903 if muted:
904 self.video_src.set_state(Gst.State.NULL)
905 state = "muted" if muted else "unmuted" 927 state = "muted" if muted else "unmuted"
906 log.info(f"video is now {state}") 928 log.info(f"Video is now {state}")
929
    def on_desktop_switch(self, desktop_active: bool) -> None:
        """Switches the video source between desktop and camera.

        @param desktop_active: True if desktop must be active. False for camera.
        """
        if self.video_muted:
            # Update the active source state but do not switch
            # NOTE(review): this assignment goes through the desktop_sharing
            # setter, which calls back into this method when the value changes;
            # the second call takes this same branch and stops once the stored
            # value matches — confirm this re-entrancy is intended.
            self.desktop_sharing = desktop_active
            return

        source = "desktop" if desktop_active else "camera"
        self.switch_video_source(source)
        # NOTE(review): when this method is invoked directly (not via the
        # desktop_sharing setter), the assignment below re-enters this method
        # through the setter and triggers a second, redundant
        # switch_video_source call — verify against callers.
        self.desktop_sharing = desktop_active
943
944 def switch_video_source(self, source: str|None) -> None:
945 """Activates the specified source while deactivating the others.
946
947 @param source: 'desktop', 'camera', 'muted' or None for muted source.
948 """
949 if source is None:
950 source = "muted"
951 if source not in ["camera", "muted", "desktop"]:
952 raise ValueError(
953 f"Invalid source: {source!r}, use one of {'camera', 'muted', 'desktop'}"
954 )
955
956 self.pipeline.set_state(Gst.State.PAUSED)
957
958 # Create a new desktop source if necessary
959 if source == "desktop":
960 self._setup_desktop_source(self.desktop_sharing_data)
961
962 # Activate the chosen source and deactivate the others
963 for src_name in ["camera", "muted", "desktop"]:
964 src_element = self.pipeline.get_by_name(f"{src_name}_src")
965 if src_name == source:
966 if src_element:
967 src_element.set_state(Gst.State.PLAYING)
968 else:
969 if src_element:
970 src_element.set_state(Gst.State.NULL)
971 if src_name == "desktop":
972 self._remove_desktop_source(src_element)
973
974 # Set the video_selector active pad
975 pad_name = f"sink_{['camera', 'muted', 'desktop'].index(source)}"
976 pad = self.video_selector.get_static_pad(pad_name)
977 self.video_selector.props.active_pad = pad
978 self.pipeline.set_state(Gst.State.PLAYING)
979
980 def _setup_desktop_source(self, properties: dict[str, object]|None) -> None:
981 """Set up a new desktop source.
982
983 @param properties: The properties to set on the desktop source.
984 """
985 desktop_src = Gst.ElementFactory.make("ximagesrc", "desktop_src")
986 if properties is None:
987 properties = {}
988 for key, value in properties.items():
989 log.debug(f"setting ximagesrc property: {key!r}={value!r}")
990 desktop_src.set_property(key, value)
991 video_convert = Gst.ElementFactory.make("videoconvert", "desktop_videoconvert")
992 queue = Gst.ElementFactory.make("queue", "desktop_queue")
993 queue.set_property("leaky", "downstream")
994
995 self.pipeline.add(desktop_src)
996 self.pipeline.add(video_convert)
997 self.pipeline.add(queue)
998
999 desktop_src.link(video_convert)
1000 video_convert.link(queue)
1001
1002 sink_pad_template = self.video_selector.get_pad_template("sink_%u")
1003 sink_pad = self.video_selector.request_pad(sink_pad_template, None, None)
1004 queue_src_pad = queue.get_static_pad("src")
1005 queue_src_pad.link(sink_pad)
1006
1007 desktop_src.sync_state_with_parent()
1008 video_convert.sync_state_with_parent()
1009 queue.sync_state_with_parent()
1010
    def _remove_desktop_source(self, desktop_src: Gst.Element) -> None:
        """Remove the desktop source from the pipeline.

        @param desktop_src: The desktop source to remove.
        """
        # Remove elements for the desktop source.
        # (desktop_src is expected to be in the NULL state already: the caller,
        # switch_video_source, sets it to NULL before invoking this method.)
        video_convert = self.pipeline.get_by_name("desktop_videoconvert")
        queue = self.pipeline.get_by_name("desktop_queue")
        if video_convert:
            video_convert.set_state(Gst.State.NULL)
            desktop_src.unlink(video_convert)
            self.pipeline.remove(video_convert)
        if queue:
            queue.set_state(Gst.State.NULL)
            self.pipeline.remove(queue)
        # NOTE(review): the input-selector request pad obtained in
        # _setup_desktop_source is apparently not released here
        # (release_request_pad); selector pads may accumulate over repeated
        # desktop toggles — confirm whether this is acceptable.
        self.pipeline.remove(desktop_src)
907 1027
908 async def end_call(self) -> None: 1028 async def end_call(self) -> None:
909 """Stop streaming and clean instance""" 1029 """Stop streaming and clean instance"""
910 self.reset_instance() 1030 self.reset_instance()