libervia-backend: comparison of libervia/frontends/tools/webrtc_remote_control.py @ 4270:0d7bb4df2343
Reformatted code base using black.
author | Goffi <goffi@goffi.org> |
date | Wed, 19 Jun 2024 18:44:57 +0200 |
parents | 8acf46ed7f36 |
children | |
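The hunks below contain only formatting changes: union type annotations gain spaces around `|`, trailing commas are added to multi-line collections and calls, and hand-wrapped argument lists that fit within the line length are collapsed onto one line. A before/after excerpt, copied from the hunks below, illustrates the pattern:

```python
# Changeset 4269 (before black): no spaces around the union operator,
# call arguments wrapped by hand.
OnOpenCbType = Callable[["WebRTCRemoteController"], None|Awaitable[None]]
remote_control_data_s = await self.bridge.remote_control_start(
    str(callee),
    data_format.serialise(
        rc_data
    ),
    profile,
)

# Changeset 4270 (after black): spaces around `|`, arguments collapsed
# where they fit within the line-length limit.
OnOpenCbType = Callable[["WebRTCRemoteController"], None | Awaitable[None]]
remote_control_data_s = await self.bridge.remote_control_start(
    str(callee),
    data_format.serialise(rc_data),
    profile,
)
```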
4269:64a85ce8be70 | 4270:0d7bb4df2343 |
---|---|
35 SourcesNone, | 35 SourcesNone, |
36 SourcesPipeline, | 36 SourcesPipeline, |
37 ) | 37 ) |
38 | 38 |
39 gi.require_versions({"Gst": "1.0", "GstWebRTC": "1.0"}) | 39 gi.require_versions({"Gst": "1.0", "GstWebRTC": "1.0"}) |
40 OnOpenCbType = Callable[["WebRTCRemoteController"], None|Awaitable[None]] | 40 OnOpenCbType = Callable[["WebRTCRemoteController"], None | Awaitable[None]] |
41 MOUSE_BTN_LEFT = 0x110 | 41 MOUSE_BTN_LEFT = 0x110 |
42 MOUSE_BTN_RIGHT = 0x111 | 42 MOUSE_BTN_RIGHT = 0x111 |
43 MOUSE_BTN_MIDDLE = 0x112 | 43 MOUSE_BTN_MIDDLE = 0x112 |
44 MOUSE_BTN_FORWARD = 0x115 | 44 MOUSE_BTN_FORWARD = 0x115 |
45 MOUSE_BTN_BACK = 0x116 | 45 MOUSE_BTN_BACK = 0x116 |
69 self.bridge = bridge | 69 self.bridge = bridge |
70 self.profile = profile | 70 self.profile = profile |
71 self.on_call_start_cb = on_call_start_cb | 71 self.on_call_start_cb = on_call_start_cb |
72 self.end_call_cb = end_call_cb | 72 self.end_call_cb = end_call_cb |
73 self.loop = asyncio.get_event_loop() | 73 self.loop = asyncio.get_event_loop() |
74 self.data_channel: GstWebRTC.WebRTCDataChannel|None = None | 74 self.data_channel: GstWebRTC.WebRTCDataChannel | None = None |
75 | 75 |
76 def send_input(self, input_data: dict) -> None: | 76 def send_input(self, input_data: dict) -> None: |
77 """Send an input data to controlled device | 77 """Send an input data to controlled device |
78 | 78 |
79 @param input_data: data of the input event. | 79 @param input_data: data of the input event. |
93 "call_data": call_data, | 93 "call_data": call_data, |
94 } | 94 } |
95 rc_data.update(options) | 95 rc_data.update(options) |
96 remote_control_data_s = await self.bridge.remote_control_start( | 96 remote_control_data_s = await self.bridge.remote_control_start( |
97 str(callee), | 97 str(callee), |
98 data_format.serialise( | 98 data_format.serialise(rc_data), |
99 rc_data | |
100 ), | |
101 profile, | 99 profile, |
102 ) | 100 ) |
103 remote_control_data = data_format.deserialise(remote_control_data_s) | 101 remote_control_data = data_format.deserialise(remote_control_data_s) |
104 | 102 |
105 if self.on_call_start_cb is not None: | 103 if self.on_call_start_cb is not None: |
124 | 122 |
125 async def on_dc_opened(self, on_open_cb: OnOpenCbType) -> None: | 123 async def on_dc_opened(self, on_open_cb: OnOpenCbType) -> None: |
126 await aio.maybe_async(on_open_cb(self)) | 124 await aio.maybe_async(on_open_cb(self)) |
127 | 125 |
128 async def start( | 126 async def start( |
129 self, | 127 self, callee: jid.JID, options: dict, on_open_cb: OnOpenCbType |
130 callee: jid.JID, | |
131 options: dict, | |
132 on_open_cb: OnOpenCbType | |
133 ) -> None: | 128 ) -> None: |
134 """Start a remote control session with ``callee`` | 129 """Start a remote control session with ``callee`` |
135 | 130 |
136 @param callee: The JID of the recipient to send the file to. | 131 @param callee: The JID of the recipient to send the file to. |
137 @param options: Options such as which devices to control. | 132 @param options: Options such as which devices to control. |
152 | 147 |
153 | 148 |
154 class WebRTCRemoteControlReceiver: | 149 class WebRTCRemoteControlReceiver: |
155 | 150 |
156 def __init__( | 151 def __init__( |
157 self, bridge, profile: str, on_close_cb: Callable[[], Any] | None = None, | 152 self, |
158 verbose: bool = False | 153 bridge, |
| 154 profile: str, |
| 155 on_close_cb: Callable[[], Any] | None = None, |
| 156 verbose: bool = False, |
159 ) -> None: | 157 ) -> None: |
160 """Initializes the File Receiver. | 158 """Initializes the File Receiver. |
161 | 159 |
162 @param bridge: An async Bridge instance. | 160 @param bridge: An async Bridge instance. |
163 @param profile: The profile name to be used. | 161 @param profile: The profile name to be used. |
167 self.bridge = bridge | 165 self.bridge = bridge |
168 self.profile = profile | 166 self.profile = profile |
169 self.on_close_cb = on_close_cb | 167 self.on_close_cb = on_close_cb |
170 self.loop = asyncio.get_event_loop() | 168 self.loop = asyncio.get_event_loop() |
171 self.desktop_portal = None | 169 self.desktop_portal = None |
172 self.remote_desktop_data: dict|None = None | 170 self.remote_desktop_data: dict | None = None |
173 self.stream_node_id: int|None = None | 171 self.stream_node_id: int | None = None |
174 self.verbose = verbose | 172 self.verbose = verbose |
175 | 173 |
176 async def do_input(self, data: dict) -> None: | 174 async def do_input(self, data: dict) -> None: |
177 assert self.desktop_portal is not None | 175 assert self.desktop_portal is not None |
178 try: | 176 try: |
181 try: | 179 try: |
182 try: | 180 try: |
183 x, y = data["x"], data["y"] | 181 x, y = data["x"], data["y"] |
184 except KeyError: | 182 except KeyError: |
185 dx, dy = data["movementX"], data["movementY"] | 183 dx, dy = data["movementX"], data["movementY"] |
186 await self.desktop_portal.notify_pointer_motion( | 184 await self.desktop_portal.notify_pointer_motion(dx, dy) |
187 dx, dy | |
188 ) | |
189 else: | 185 else: |
190 assert self.stream_node_id is not None | 186 assert self.stream_node_id is not None |
191 await self.desktop_portal.notify_pointer_motion_absolute( | 187 await self.desktop_portal.notify_pointer_motion_absolute( |
192 self.stream_node_id, x, y | 188 self.stream_node_id, x, y |
193 ) | 189 ) |
198 buttons = data["buttons"] | 194 buttons = data["buttons"] |
199 state = 1 if type_ == "mousedown" else 0 | 195 state = 1 if type_ == "mousedown" else 0 |
200 # see https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/buttons#value | 196 # see https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/buttons#value |
201 if buttons & 1: | 197 if buttons & 1: |
202 await self.desktop_portal.notify_pointer_button( | 198 await self.desktop_portal.notify_pointer_button( |
203 MOUSE_BTN_LEFT, | 199 MOUSE_BTN_LEFT, state |
204 state | |
205 ) | 200 ) |
206 if buttons & 2: | 201 if buttons & 2: |
207 await self.desktop_portal.notify_pointer_button( | 202 await self.desktop_portal.notify_pointer_button( |
208 MOUSE_BTN_RIGHT, | 203 MOUSE_BTN_RIGHT, state |
209 state | |
210 ) | 204 ) |
211 if buttons & 4: | 205 if buttons & 4: |
212 await self.desktop_portal.notify_pointer_button( | 206 await self.desktop_portal.notify_pointer_button( |
213 MOUSE_BTN_MIDDLE, | 207 MOUSE_BTN_MIDDLE, state |
214 state | |
215 ) | 208 ) |
216 if buttons & 8: | 209 if buttons & 8: |
217 await self.desktop_portal.notify_pointer_button( | 210 await self.desktop_portal.notify_pointer_button( |
218 MOUSE_BTN_BACK, | 211 MOUSE_BTN_BACK, state |
219 state | |
220 ) | 212 ) |
221 if buttons & 16: | 213 if buttons & 16: |
222 await self.desktop_portal.notify_pointer_button( | 214 await self.desktop_portal.notify_pointer_button( |
223 MOUSE_BTN_FORWARD, | 215 MOUSE_BTN_FORWARD, state |
224 state | |
225 ) | 216 ) |
226 elif type_ == "wheel": | 217 elif type_ == "wheel": |
227 dx = data.get("deltaX", 0) | 218 dx = data.get("deltaX", 0) |
228 dy = data.get("deltaY", 0) | 219 dy = data.get("deltaY", 0) |
229 delta_mode = data["deltaMode"] | 220 delta_mode = data["deltaMode"] |
230 if delta_mode == 0: | 221 if delta_mode == 0: |
231 # deltas are in pixels | 222 # deltas are in pixels |
232 await self.desktop_portal.notify_pointer_axis( | 223 await self.desktop_portal.notify_pointer_axis(dx, dy) |
233 dx, | |
234 dy | |
235 ) | |
236 else: | 224 else: |
237 # deltas are in steps (see | 225 # deltas are in steps (see |
238 # https://developer.mozilla.org/en-US/docs/Web/API/Element/wheel_event#event_properties) | 226 # https://developer.mozilla.org/en-US/docs/Web/API/Element/wheel_event#event_properties) |
239 if dx: | 227 if dx: |
240 await self.desktop_portal.notify_pointer_axis( | 228 await self.desktop_portal.notify_pointer_axis(1, dx) |
241 1, | |
242 dx | |
243 ) | |
244 if dy: | 229 if dy: |
245 await self.desktop_portal.notify_pointer_axis( | 230 await self.desktop_portal.notify_pointer_axis(0, dy) |
246 0, | |
247 dy | |
248 ) | |
249 elif type_.startswith("key"): | 231 elif type_.startswith("key"): |
250 # FIXME: this is a really naive implementation, it needs tot be improved. | 232 # FIXME: this is a really naive implementation, it needs tot be improved. |
251 key = data["key"] | 233 key = data["key"] |
252 if data.get("shiftKey", False): | 234 if data.get("shiftKey", False): |
253 key = key.upper() | 235 key = key.upper() |
256 ) | 238 ) |
257 | 239 |
258 except Exception: | 240 except Exception: |
259 log.exception(f"Can't handle input {data}") | 241 log.exception(f"Can't handle input {data}") |
260 | 242 |
261 | |
262 def _on_dc_message_data(self, data_channel, glib_data) -> None: | 243 def _on_dc_message_data(self, data_channel, glib_data) -> None: |
263 """A data chunk of the file has been received.""" | 244 """A data chunk of the file has been received.""" |
264 raw = glib_data.get_data() | 245 raw = glib_data.get_data() |
265 data = cbor2.loads(raw) | 246 data = cbor2.loads(raw) |
266 if self.verbose: | 247 if self.verbose: |
267 print(data) | 248 print(data) |
268 aio.run_from_thread( | 249 aio.run_from_thread(self.do_input, data, loop=self.loop) |
269 self.do_input, | |
270 data, | |
271 loop=self.loop | |
272 ) | |
273 | 250 |
274 def _on_dc_close(self, data_channel) -> None: | 251 def _on_dc_close(self, data_channel) -> None: |
275 """Data channel is closed | 252 """Data channel is closed |
276 | 253 |
277 The file download should be complete, we close it. | 254 The file download should be complete, we close it. |
282 if self.on_close_cb is not None: | 259 if self.on_close_cb is not None: |
283 await aio.maybe_async(self.on_close_cb()) | 260 await aio.maybe_async(self.on_close_cb()) |
284 | 261 |
285 def _on_data_channel(self, webrtcbin, data_channel) -> None: | 262 def _on_data_channel(self, webrtcbin, data_channel) -> None: |
286 """The data channel has been opened.""" | 263 """The data channel has been opened.""" |
287 data_channel.connect( | 264 data_channel.connect("on-message-data", self._on_dc_message_data) |
288 "on-message-data", self._on_dc_message_data | |
289 ) | |
290 data_channel.connect("on-close", self._on_dc_close) | 265 data_channel.connect("on-close", self._on_dc_close) |
291 | 266 |
292 async def request_remote_desktop(self, with_screen_sharing: bool) -> None: | 267 async def request_remote_desktop(self, with_screen_sharing: bool) -> None: |
293 """Request autorisation to remote control desktop. | 268 """Request autorisation to remote control desktop. |
294 | 269 |
295 @param with_screen_sharing: True if screen must be shared. | 270 @param with_screen_sharing: True if screen must be shared. |
296 """ | 271 """ |
297 from .portal_desktop import DesktopPortal | 272 from .portal_desktop import DesktopPortal |
| 273 |
298 self.desktop_portal = DesktopPortal() | 274 self.desktop_portal = DesktopPortal() |
299 self.remote_desktop_data = await self.desktop_portal.request_remote_desktop( | 275 self.remote_desktop_data = await self.desktop_portal.request_remote_desktop( |
300 with_screen_sharing | 276 with_screen_sharing |
301 ) | 277 ) |
302 print(self.remote_desktop_data) | 278 print(self.remote_desktop_data) |
303 | 279 |
304 async def start_receiving( | 280 async def start_receiving( |
305 self, | 281 self, from_jid: jid.JID, session_id: str, screenshare: dict |
306 from_jid: jid.JID, | |
307 session_id: str, | |
308 screenshare: dict | |
309 ) -> None: | 282 ) -> None: |
310 """Receives a file via WebRTC and saves it to the specified path. | 283 """Receives a file via WebRTC and saves it to the specified path. |
311 | 284 |
312 @param from_jid: The JID of the entity sending the file. | 285 @param from_jid: The JID of the entity sending the file. |
313 @param session_id: The Jingle FT Session ID. | 286 @param session_id: The Jingle FT Session ID. |
326 video_pipeline="pipewiresrc", | 299 video_pipeline="pipewiresrc", |
327 audio_pipeline="", | 300 audio_pipeline="", |
328 video_properties={ | 301 video_properties={ |
329 "path": str(self.stream_node_id), | 302 "path": str(self.stream_node_id), |
330 "do-timestamp": 1, | 303 "do-timestamp": 1, |
331 } | 304 }, |
332 ) | 305 ) |
333 except KeyError: | 306 except KeyError: |
334 sources_data = SourcesNone() | 307 sources_data = SourcesNone() |
335 else: | 308 else: |
336 sources_data = SourcesNone() | 309 sources_data = SourcesNone() |
337 | 310 |
338 await webrtc.WebRTCCall.make_webrtc_call( | 311 await webrtc.WebRTCCall.make_webrtc_call( |
339 self.bridge, | 312 self.bridge, |
340 self.profile, | 313 self.profile, |
341 call_data, | 314 call_data, |
342 sources_data = sources_data, | 315 sources_data=sources_data, |
343 sinks_data=webrtc.SinksNone(), | 316 sinks_data=webrtc.SinksNone(), |
344 dc_data_list=[webrtc.SinksDataChannel( | 317 dc_data_list=[ |
345 dc_on_data_channel=self._on_data_channel, | 318 webrtc.SinksDataChannel( |
346 )], | 319 dc_on_data_channel=self._on_data_channel, |
| 320 ) |
| 321 ], |
347 ) | 322 ) |
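For reference, `_on_dc_message_data` above decodes each data-channel message with `cbor2.loads()` and forwards the resulting dict to `do_input()`, which reads browser-style event fields (`x`/`y` or `movementX`/`movementY`, `buttons`, `deltaX`/`deltaY`/`deltaMode`, `key`, `shiftKey`). The sketch below shows what such payloads might look like on the controller side before being sent with `send_input()`; the `"type"` key and the `"mousemove"` value are assumptions, since the dispatch lines sit outside these hunks.

```python
# Hypothetical input-event dicts mirroring the fields consumed by
# WebRTCRemoteControlReceiver.do_input() above (CBOR-encoded on the wire).
# The "type" key and the "mousemove" value are guesses; the other field
# names are taken verbatim from do_input().
relative_motion = {"type": "mousemove", "movementX": 12, "movementY": -3}
absolute_motion = {"type": "mousemove", "x": 640, "y": 360}
left_button_down = {"type": "mousedown", "buttons": 1}  # bit 0 = left button
wheel_scroll = {"type": "wheel", "deltaX": 0, "deltaY": 120, "deltaMode": 0}  # pixel deltas
key_press = {"type": "keydown", "key": "a", "shiftKey": False}
```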