// sliding_sync.rs
use anyhow::{bail, Result};
use clap::Parser;
use eyeball::Subscriber;
use eyeball_im::VectorDiff;
use futures_util::{pin_mut, StreamExt};
use imbl::Vector;
use makepad_widgets::{error, log, warning, Cx, SignalToUI};
use matrix_sdk::{
    config::RequestConfig,
    event_handler::EventHandlerDropGuard,
    media::MediaRequest,
    room::RoomMember,
    ruma::{
        api::client::{receipt::create_receipt::v3::ReceiptType, session::get_login_types::v3::LoginType},
        events::{
            receipt::ReceiptThread,
            room::{message::{ForwardThread, RoomMessageEventContent}, MediaSource},
            FullStateEventContent, MessageLikeEventType, TimelineEventType,
        },
        MilliSecondsSinceUnixEpoch, OwnedEventId, OwnedMxcUri, OwnedRoomAliasId, OwnedRoomId, OwnedUserId, UserId,
    },
    sliding_sync::VersionBuilder,
    Client, ClientBuildError, Error, Room, RoomMemberships,
};
use matrix_sdk_ui::{
room_list_service::{self, RoomListLoadingState},
sync_service::{self, SyncService},
timeline::{AnyOtherFullStateEventContent, EventTimelineItem, RepliedToInfo, TimelineDetails, TimelineItem, TimelineItemContent, MembershipChange},
Timeline,
};
use robius_open::Uri;
use tokio::{
    runtime::Handle,
    sync::{mpsc::{Receiver, Sender, UnboundedReceiver, UnboundedSender}, watch},
    task::JoinHandle,
};
use unicode_segmentation::UnicodeSegmentation;
use std::{
    cmp::{max, min},
    collections::{BTreeMap, BTreeSet},
    io,
    ops::Not,
    path::Path,
    sync::{Arc, Mutex, OnceLock},
};
use crate::{
    app_data_dir,
    avatar_cache::AvatarUpdate,
    event_preview::text_preview_of_timeline_item,
    home::{
        room_screen::TimelineUpdate,
        rooms_list::{self, enqueue_rooms_list_update, RoomPreviewAvatar, RoomsListEntry, RoomsListUpdate},
    },
    login::login_screen::LoginAction,
    media_cache::MediaCacheEntry,
    persistent_state::{self, ClientSessionPersisted},
    profile::{
        user_profile::{AvatarState, UserProfile},
        user_profile_cache::{enqueue_user_profile_update, UserProfileUpdate},
    },
    shared::{jump_to_bottom_button::UnreadMessageCount, popup_list::enqueue_popup_notification},
    utils::MEDIA_THUMBNAIL_FORMAT,
    verification::add_verification_event_handlers_and_sync_client,
};
#[derive(Parser, Debug, Default)]
struct Cli {
/// The user ID to login with.
#[clap(value_parser)]
user_id: String,
/// The password that should be used for the login.
#[clap(value_parser)]
password: String,
/// The homeserver to connect to.
#[clap(value_parser)]
homeserver: Option<String>,
/// Set the proxy that should be used for the connection.
#[clap(short, long)]
proxy: Option<String>,
/// Force login screen.
#[clap(short, long, action)]
login_screen: bool,
/// Enable verbose logging output.
#[clap(short, long, action)]
verbose: bool,
}
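// Illustrative sketch of how this struct gets populated (hypothetical call site;
// the real entry point lives elsewhere in this crate). Clap's derive API parses
// the process arguments into the fields above:
//
//     let cli = Cli::parse(); // parses std::env::args()
//     if cli.verbose { /* enable verbose logging */ }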
impl From<LoginByPassword> for Cli {
fn from(login: LoginByPassword) -> Self {
Self {
user_id: login.user_id,
password: login.password,
homeserver: None,
proxy: None,
login_screen: false,
verbose: false,
}
}
}
/// Build a new client.
async fn build_client(
cli: &Cli,
data_dir: &Path,
) -> Result<(Client, ClientSessionPersisted), ClientBuildError> {
// Generate a unique subfolder name for the client database,
// which allows multiple clients to run simultaneously.
let now = chrono::Local::now();
let db_subfolder_name: String = format!("db_{}", now.format("%F_%H_%M_%S_%f"));
let db_path = data_dir.join(db_subfolder_name);
// Generate a random passphrase.
let passphrase: String = {
use rand::{Rng, thread_rng};
thread_rng()
.sample_iter(rand::distributions::Alphanumeric)
.take(32)
.map(char::from)
.collect()
};
let homeserver_url = cli.homeserver.as_deref()
.unwrap_or("https://matrix-client.matrix.org/");
// .unwrap_or("https://matrix.org/");
let mut builder = Client::builder()
.server_name_or_homeserver_url(homeserver_url)
// Use a sqlite database to persist the client's encryption setup.
.sqlite_store(&db_path, Some(&passphrase))
// The sliding sync proxy has now been deprecated in favor of native sliding sync.
.sliding_sync_version_builder(VersionBuilder::DiscoverNative)
.handle_refresh_tokens();
if let Some(proxy) = cli.proxy.as_ref() {
builder = builder.proxy(proxy.clone());
}
// Use a 60-second timeout for all requests to the homeserver.
// This is deliberately long, because the matrix.org homeserver is often slow to respond.
builder = builder.request_config(
RequestConfig::new()
.timeout(std::time::Duration::from_secs(60))
);
let client = builder.build().await?;
Ok((
client,
ClientSessionPersisted {
homeserver: homeserver_url.to_string(),
db_path,
passphrase,
},
))
}
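// A minimal call sketch for `build_client` (illustrative only; within this file
// it is invoked from `login()` below with the real app data directory):
//
//     let (client, session) = build_client(&cli, app_data_dir()).await?;
//     log!("Built client for homeserver {}", session.homeserver);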
/// Logs in to the given Matrix homeserver using the given username and password.
///
/// This function is used by the login screen to log in to the Matrix server.
///
/// Upon success, this function returns the logged-in client and an optional sync token.
async fn login(
cli: &Cli,
login_request: LoginRequest,
login_types: &[LoginType],
) -> Result<(Client, Option<String>)> {
match login_request {
LoginRequest::LoginByCli | LoginRequest::LoginByPassword(_) => {
let cli = if let LoginRequest::LoginByPassword(login_by_password) = login_request {
&Cli::from(login_by_password)
} else {
cli
};
let (client, client_session) = build_client(cli, app_data_dir()).await?;
if !login_types
.iter()
.any(|flow| matches!(flow, LoginType::Password(_)))
{
bail!("Homeserver does not support username + password login flow.");
}
// Attempt to login using the CLI-provided username & password.
let login_result = client
.matrix_auth()
.login_username(&cli.user_id, &cli.password)
.initial_device_display_name("robrix-un-pw")
.send()
.await?;
if client.logged_in() {
log!("Logged in successfully.");
let status = format!("Logged in as {}.\n → Loading rooms...", cli.user_id);
// enqueue_popup_notification(status.clone());
enqueue_rooms_list_update(RoomsListUpdate::Status { status });
if let Err(e) = persistent_state::save_session(&client, client_session).await {
let err_msg = format!("Failed to save session state to storage: {e}");
error!("{err_msg}");
enqueue_popup_notification(err_msg);
}
Ok((client, None))
} else {
let err_msg = format!("Failed to login as {}: {:?}", cli.user_id, login_result);
enqueue_popup_notification(err_msg.clone());
enqueue_rooms_list_update(RoomsListUpdate::Status { status: err_msg.clone() });
bail!(err_msg);
}
}
LoginRequest::LoginBySSOSuccess(client, client_session) => {
if let Err(e) = persistent_state::save_session(&client, client_session).await {
error!("Failed to save session state to storage: {e:?}");
}
Ok((client, None))
}
LoginRequest::HomeserverLoginTypesQuery(_) => {
bail!("LoginRequest::HomeserverLoginTypesQuery not handled earlier");
}
}
}
async fn populate_login_types(
homeserver_url: &str,
login_types: &mut Vec<LoginType>,
) -> Result<()> {
Cx::post_action(LoginAction::Status {
title: "Querying login types".into(),
status: "Fetching supported login types from the homeserver...".into(),
});
let homeserver_url = if homeserver_url.is_empty() {
DEFAULT_HOMESERVER
} else {
homeserver_url
};
let client = Client::builder()
.server_name_or_homeserver_url(homeserver_url)
.build()
.await?;
match client.matrix_auth().get_login_types().await {
Ok(login_types_res) => {
*login_types = login_types_res.flows;
let identity_providers = login_types.iter().fold(Vec::new(), |mut acc, login_type| {
if let LoginType::Sso(sso_type) = login_type {
acc.extend_from_slice(sso_type.identity_providers.as_slice());
}
acc
});
Cx::post_action(LoginAction::IdentityProvider(identity_providers));
Ok(())
}
Err(e) => {
Err(e.into())
}
}
}
/// Which direction to paginate in.
///
/// * `Forwards` will retrieve later events (towards the end of the timeline),
/// which only works if the timeline is *focused* on a specific event.
/// * `Backwards`: the more typical choice, in which earlier events are retrieved
/// (towards the start of the timeline), which works in both live mode and focused mode.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PaginationDirection {
Forwards,
Backwards,
}
impl std::fmt::Display for PaginationDirection {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Forwards => write!(f, "forwards"),
Self::Backwards => write!(f, "backwards"),
}
}
}
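// Example of how the `Display` impl reads in log output (illustrative):
//
//     let direction = PaginationDirection::Backwards;
//     assert_eq!(direction.to_string(), "backwards");
//     log!("Starting {direction} pagination request..."); // "Starting backwards pagination request..."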
/// The function signature for the callback that gets invoked when media is fetched.
pub type OnMediaFetchedFn = fn(
&Mutex<MediaCacheEntry>,
MediaRequest,
matrix_sdk::Result<Vec<u8>>,
Option<crossbeam_channel::Sender<TimelineUpdate>>,
);
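// A minimal sketch of a callback matching `OnMediaFetchedFn` (hypothetical;
// the real implementations live in the media cache module):
//
//     fn on_media_fetched(
//         destination: &Mutex<MediaCacheEntry>,
//         _request: MediaRequest,
//         result: matrix_sdk::Result<Vec<u8>>,
//         update_sender: Option<crossbeam_channel::Sender<TimelineUpdate>>,
//     ) {
//         // Store the fetched bytes (or the error) into `destination`,
//         // then notify the timeline via `update_sender`, if provided.
//     }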
/// The set of requests for async work that can be made to the worker thread.
pub enum MatrixRequest {
/// Request from the login screen to log in with the given credentials.
Login(LoginRequest),
/// Request to paginate the older (or newer) events of a room's timeline.
PaginateRoomTimeline {
room_id: OwnedRoomId,
/// The maximum number of timeline events to fetch in each pagination batch.
num_events: u16,
direction: PaginationDirection,
},
/// Request to fetch the full details of the given event in the given room's timeline.
FetchDetailsForEvent {
room_id: OwnedRoomId,
event_id: OwnedEventId,
},
/// Request to fetch profile information for all members of a room.
/// This can be *very* slow depending on the number of members in the room.
FetchRoomMembers {
room_id: OwnedRoomId,
},
/// Request to fetch profile information for the given user ID.
GetUserProfile {
user_id: OwnedUserId,
/// * If `Some`, the user is known to be a member of a room, so this will
/// fetch the user's profile from that room's membership info.
/// * If `None`, the user's profile info will be fetched from the server
/// in a room-agnostic manner, and no room membership info will be returned.
room_id: Option<OwnedRoomId>,
/// * If `true` (not recommended), only the local cache will be accessed.
/// * If `false` (recommended), details will be fetched from the server.
local_only: bool,
},
/// Request to fetch the number of unread messages in the given room.
GetNumberUnreadMessages {
room_id: OwnedRoomId,
},
/// Request to ignore/block or unignore/unblock a user.
IgnoreUser {
/// Whether to ignore (`true`) or unignore (`false`) the user.
ignore: bool,
/// The room membership info of the user to (un)ignore.
room_member: RoomMember,
/// The room ID of the room where the user is a member,
/// which is only needed because it isn't present in the `RoomMember` object.
room_id: OwnedRoomId,
},
/// Request to resolve a room alias into a room ID and the servers that know about that room.
ResolveRoomAlias(OwnedRoomAliasId),
/// Request to fetch an Avatar image from the server.
/// Upon completion of the async media request, the `on_fetched` function
/// will be invoked with the content of an `AvatarUpdate`.
FetchAvatar {
mxc_uri: OwnedMxcUri,
on_fetched: fn(AvatarUpdate),
},
/// Request to fetch media from the server.
/// Upon completion of the async media request, the `on_fetched` function
/// will be invoked with four arguments: the `destination`, the `media_request`,
/// the result of the media fetch, and the `update_sender`.
FetchMedia {
media_request: MediaRequest,
on_fetched: OnMediaFetchedFn,
destination: Arc<Mutex<MediaCacheEntry>>,
update_sender: Option<crossbeam_channel::Sender<TimelineUpdate>>,
},
/// Request to send a message to the given room.
SendMessage {
room_id: OwnedRoomId,
message: RoomMessageEventContent,
replied_to: Option<RepliedToInfo>,
},
/// Sends a notice to the given room that the current user is or is not typing.
///
/// This request does not return a response or notify the UI thread, and
/// furthermore, there is no need to send a follow-up request to stop typing
/// (though you certainly can do so).
SendTypingNotice {
room_id: OwnedRoomId,
typing: bool,
},
/// Spawn an async task to login to the given Matrix homeserver using the given SSO identity provider ID.
///
/// While an SSO request is in flight, the login screen will temporarily prevent the user
/// from submitting another redundant request, until this request has succeeded or failed.
SpawnSSOServer{
brand: String,
homeserver_url: String,
identity_provider_id: String,
},
/// Subscribe to typing notices for the given room.
///
/// This request does not return a response or notify the UI thread.
SubscribeToTypingNotices {
room_id: OwnedRoomId,
/// Whether to subscribe or unsubscribe from typing notices for this room.
subscribe: bool,
},
/// Subscribe to changes in the read receipts of our own user.
///
/// This request does not return a response or notify the UI thread.
SubscribeToOwnUserReadReceiptsChanged {
room_id: OwnedRoomId,
/// Whether to subscribe to or unsubscribe from changes in the read receipts of our own user for this room.
subscribe: bool,
},
/// Sends a read receipt for the given event in the given room.
ReadReceipt {
room_id: OwnedRoomId,
event_id: OwnedEventId,
},
/// Sends a fully-read receipt for the given event in the given room.
FullyReadReceipt {
room_id: OwnedRoomId,
event_id: OwnedEventId,
},
/// Sends a request checking if the currently logged-in user can send a message to the given room.
///
/// The response is delivered back to the main UI thread via a `TimelineUpdate::CanUserSendMessage`.
CheckCanUserSendMessage {
room_id: OwnedRoomId,
}
}
/// Submits a request to the worker thread to be executed asynchronously.
pub fn submit_async_request(req: MatrixRequest) {
    REQUEST_SENDER.get()
        .unwrap() // safe: REQUEST_SENDER is initialized in `start_matrix_tokio()`
        .send(req)
        .expect("BUG: async worker task receiver has died!");
}
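// Illustrative usage: requesting an older page of a room's timeline from UI
// code (`room_id` is assumed to be an `OwnedRoomId` already in hand):
//
//     submit_async_request(MatrixRequest::PaginateRoomTimeline {
//         room_id,
//         num_events: 50,
//         direction: PaginationDirection::Backwards,
//     });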
/// Details of a login request that get submitted within [`MatrixRequest::Login`].
pub enum LoginRequest{
LoginByPassword(LoginByPassword),
LoginBySSOSuccess(Client, ClientSessionPersisted),
LoginByCli,
HomeserverLoginTypesQuery(String),
}
/// Information needed to log in to a Matrix homeserver.
pub struct LoginByPassword {
pub user_id: String,
pub password: String,
pub homeserver: Option<String>,
}
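// Illustrative sketch of submitting a password login request from the login
// screen (field values are placeholders):
//
//     submit_async_request(MatrixRequest::Login(LoginRequest::LoginByPassword(
//         LoginByPassword {
//             user_id: "@alice:matrix.org".into(),
//             password: "correct horse battery staple".into(),
//             homeserver: None,
//         },
//     )));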
/// The entry point for an async worker thread that can run async tasks.
///
/// All this thread does is wait for [`MatrixRequest`]s from the main UI-driven non-async thread(s)
/// and then executes them within an async runtime context.
async fn async_worker(
mut request_receiver: UnboundedReceiver<MatrixRequest>,
login_sender: Sender<LoginRequest>,
) -> Result<()> {
log!("Started async_worker task.");
let subscribe_to_current_user_read_receipt_changed: std::sync::Arc<tokio::sync::Mutex<BTreeMap<OwnedRoomId, bool>>> = Arc::new(tokio::sync::Mutex::new(BTreeMap::new()));
while let Some(request) = request_receiver.recv().await {
match request {
MatrixRequest::Login(login_request) => {
if let Err(e) = login_sender.send(login_request).await {
error!("Error sending login request to login_sender: {e:?}");
Cx::post_action(LoginAction::LoginFailure(String::from(
"BUG: failed to send login request to async worker thread."
)));
}
}
MatrixRequest::PaginateRoomTimeline { room_id, num_events, direction } => {
let (timeline, sender) = {
let mut all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get_mut(&room_id) else {
log!("Skipping pagination request for not-yet-known room {room_id}");
continue;
};
let timeline_ref = room_info.timeline.clone();
let sender = room_info.timeline_update_sender.clone();
(timeline_ref, sender)
};
// Spawn a new async task that will make the actual pagination request.
let _paginate_task = Handle::current().spawn(async move {
log!("Starting {direction} pagination request for room {room_id}...");
sender.send(TimelineUpdate::PaginationRunning(direction)).unwrap();
SignalToUI::set_ui_signal();
let res = if direction == PaginationDirection::Forwards {
timeline.focused_paginate_forwards(num_events).await
} else {
timeline.paginate_backwards(num_events).await
};
match res {
Ok(fully_paginated) => {
log!("Completed {direction} pagination request for room {room_id}, hit {} of timeline? {}",
if direction == PaginationDirection::Forwards { "end" } else { "start" },
if fully_paginated { "yes" } else { "no" },
);
sender.send(TimelineUpdate::PaginationIdle {
fully_paginated,
direction,
}).unwrap();
SignalToUI::set_ui_signal();
}
Err(error) => {
error!("Error sending {direction} pagination request for room {room_id}: {error:?}");
sender.send(TimelineUpdate::PaginationError {
error,
direction,
}).unwrap();
SignalToUI::set_ui_signal();
}
}
});
}
MatrixRequest::FetchDetailsForEvent { room_id, event_id } => {
let (timeline, sender) = {
let mut all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get_mut(&room_id) else {
log!("BUG: room info not found for fetch details for event request {room_id}");
continue;
};
(room_info.timeline.clone(), room_info.timeline_update_sender.clone())
};
// Spawn a new async task that will make the actual fetch request.
let _fetch_task = Handle::current().spawn(async move {
// log!("Sending request to fetch details for event {event_id} in room {room_id}...");
let result = timeline.fetch_details_for_event(&event_id).await;
match result {
Ok(_) => {
// log!("Successfully fetched details for event {event_id} in room {room_id}.");
}
Err(ref _e) => {
// error!("Error fetching details for event {event_id} in room {room_id}: {e:?}");
}
}
sender.send(TimelineUpdate::EventDetailsFetched {
event_id,
result,
}).unwrap();
SignalToUI::set_ui_signal();
});
}
MatrixRequest::FetchRoomMembers { room_id } => {
let (timeline, sender) = {
let all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get(&room_id) else {
log!("BUG: room info not found for fetch members request {room_id}");
continue;
};
(room_info.timeline.clone(), room_info.timeline_update_sender.clone())
};
// Spawn a new async task that will make the actual fetch request.
let _fetch_task = Handle::current().spawn(async move {
log!("Sending fetch room members request for room {room_id}...");
timeline.fetch_members().await;
log!("Completed fetch room members request for room {room_id}.");
sender.send(TimelineUpdate::RoomMembersFetched).unwrap();
SignalToUI::set_ui_signal();
});
}
MatrixRequest::GetUserProfile { user_id, room_id, local_only } => {
let Some(client) = CLIENT.get() else { continue };
let _fetch_task = Handle::current().spawn(async move {
log!("Sending get user profile request: user: {user_id}, \
room: {room_id:?}, local_only: {local_only}...",
);
let mut update = None;
if let Some(room_id) = room_id.as_ref() {
if let Some(room) = client.get_room(room_id) {
let member = if local_only {
room.get_member_no_sync(&user_id).await
} else {
room.get_member(&user_id).await
};
if let Ok(Some(room_member)) = member {
update = Some(UserProfileUpdate::Full {
new_profile: UserProfile {
username: room_member.display_name().map(|u| u.to_owned()),
user_id: user_id.clone(),
avatar_state: AvatarState::Known(room_member.avatar_url().map(|u| u.to_owned())),
},
room_id: room_id.to_owned(),
room_member,
});
} else {
log!("User profile request: user {user_id} was not a member of room {room_id}");
}
} else {
log!("User profile request: client could not get room with ID {room_id}");
}
}
if !local_only {
if update.is_none() {
if let Ok(response) = client.account().fetch_user_profile_of(&user_id).await {
update = Some(UserProfileUpdate::UserProfileOnly(
UserProfile {
username: response.displayname,
user_id: user_id.clone(),
avatar_state: AvatarState::Known(response.avatar_url),
}
));
} else {
log!("User profile request: client could not get user with ID {user_id}");
}
}
match update.as_mut() {
Some(UserProfileUpdate::Full { new_profile: UserProfile { username, .. }, .. }) if username.is_none() => {
if let Ok(response) = client.account().fetch_user_profile_of(&user_id).await {
*username = response.displayname;
}
}
_ => { }
}
}
if let Some(upd) = update {
log!("Successfully completed get user profile request: user: {user_id}, room: {room_id:?}, local_only: {local_only}.");
enqueue_user_profile_update(upd);
} else {
log!("Failed to get user profile: user: {user_id}, room: {room_id:?}, local_only: {local_only}.");
}
});
}
MatrixRequest::GetNumberUnreadMessages { room_id } => {
let (timeline, sender) = {
let mut all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get_mut(&room_id) else {
log!("Skipping get number of unread messages request for not-yet-known room {room_id}");
continue;
};
(room_info.timeline.clone(), room_info.timeline_update_sender.clone())
};
let _get_unreads_task = Handle::current().spawn(async move {
match sender.send(TimelineUpdate::NewUnreadMessagesCount(
UnreadMessageCount::Known(timeline.room().num_unread_messages())
)) {
Ok(_) => SignalToUI::set_ui_signal(),
Err(e) => log!("Failed to send timeline update: {e:?} for GetNumberUnreadMessages request for room {room_id}"),
}
enqueue_rooms_list_update(RoomsListUpdate::UpdateNumUnreadMessages {
room_id: room_id.clone(),
count: UnreadMessageCount::Known(timeline.room().num_unread_messages())
});
});
}
MatrixRequest::IgnoreUser { ignore, room_member, room_id } => {
let Some(client) = CLIENT.get() else { continue };
let _ignore_task = Handle::current().spawn(async move {
let user_id = room_member.user_id();
log!("Sending request to {}ignore user: {user_id}...", if ignore { "" } else { "un" });
let ignore_result = if ignore {
room_member.ignore().await
} else {
room_member.unignore().await
};
log!("{} user {user_id} {}",
if ignore { "Ignoring" } else { "Unignoring" },
if ignore_result.is_ok() { "succeeded." } else { "failed." },
);
if ignore_result.is_err() {
return;
}
// We need to re-acquire the `RoomMember` object now that its state
// has changed, i.e., the user has been (un)ignored.
// We then send an update to replace the now-stale cached `RoomMember`
// with the freshly-fetched one that reflects the new ignored state.
if let Some(room) = client.get_room(&room_id) {
if let Ok(Some(new_room_member)) = room.get_member(user_id).await {
log!("Enqueueing user profile update for user {user_id}, who went from {}ignored to {}ignored.",
if room_member.is_ignored() { "" } else { "un" },
if new_room_member.is_ignored() { "" } else { "un" },
);
enqueue_user_profile_update(UserProfileUpdate::RoomMemberOnly {
room_id: room_id.clone(),
room_member: new_room_member,
});
}
}
// After successfully (un)ignoring a user, all timelines are fully cleared by the Matrix SDK.
// Therefore, we need to re-fetch all timelines for all rooms,
// and currently the only way to actually accomplish this is via pagination.
// See: <https://github.com/matrix-org/matrix-rust-sdk/issues/1703#issuecomment-2250297923>
//
// Note that here we only proactively re-paginate the *current* room
// (the one being viewed by the user when this ignore request was issued),
// and all other rooms will be re-paginated in `handle_ignore_user_list_subscriber()`.
submit_async_request(MatrixRequest::PaginateRoomTimeline {
room_id,
num_events: 50,
direction: PaginationDirection::Backwards,
});
});
}
MatrixRequest::SendTypingNotice { room_id, typing } => {
let Some(room) = CLIENT.get().and_then(|c| c.get_room(&room_id)) else {
error!("BUG: client/room not found for typing notice request {room_id}");
continue;
};
let _typing_task = Handle::current().spawn(async move {
if let Err(e) = room.typing_notice(typing).await {
error!("Failed to send typing notice to room {room_id}: {e:?}");
}
});
}
MatrixRequest::SubscribeToTypingNotices { room_id, subscribe } => {
let (room, timeline_update_sender, mut typing_notice_receiver) = {
let mut all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get_mut(&room_id) else {
log!("BUG: room info not found for subscribe to typing notices request, room {room_id}");
continue;
};
let (room, recv) = if subscribe {
if room_info.typing_notice_subscriber.is_some() {
warning!("Note: room {room_id} is already subscribed to typing notices.");
continue;
} else {
let Some(room) = CLIENT.get().and_then(|c| c.get_room(&room_id)) else {
error!("BUG: client/room not found when subscribing to typing notices request, room: {room_id}");
continue;
};
let (drop_guard, recv) = room.subscribe_to_typing_notifications();
room_info.typing_notice_subscriber = Some(drop_guard);
(room, recv)
}
} else {
room_info.typing_notice_subscriber.take();
continue;
};
// At this point, no subscriber is running for this room, so fall through and start one.
(room, room_info.timeline_update_sender.clone(), recv)
};
let _typing_notices_task = Handle::current().spawn(async move {
while let Ok(user_ids) = typing_notice_receiver.recv().await {
// log!("Received typing notifications for room {room_id}: {user_ids:?}");
let mut users = Vec::with_capacity(user_ids.len());
for user_id in user_ids {
users.push(
room.get_member_no_sync(&user_id)
.await
.ok()
.flatten()
.and_then(|m| m.display_name().map(|d| d.to_owned()))
.unwrap_or_else(|| user_id.to_string())
);
}
if let Err(e) = timeline_update_sender.send(TimelineUpdate::TypingUsers { users }) {
error!("Error: timeline update sender couldn't send the list of typing users: {e:?}");
}
SignalToUI::set_ui_signal();
}
// log!("Note: typing notifications recv loop has ended for room {}", room_id);
});
}
MatrixRequest::SubscribeToOwnUserReadReceiptsChanged { room_id, subscribe } => {
let (timeline, sender) = {
let mut all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get_mut(&room_id) else {
log!("BUG: room info not found for subscribe to own user read receipts changed request, room {room_id}");
continue;
};
(room_info.timeline.clone(), room_info.timeline_update_sender.clone())
};
let subscribe_to_current_user_read_receipt_changed = subscribe_to_current_user_read_receipt_changed.clone();
let _to_updates_task = Handle::current().spawn(async move {
let update_receiver = timeline.subscribe_own_user_read_receipts_changed().await;
let read_receipt_change_mutex = subscribe_to_current_user_read_receipt_changed.clone();
let mut read_receipt_change_mutex_guard = read_receipt_change_mutex.lock().await;
if let Some(subscription) = read_receipt_change_mutex_guard.get(&room_id) {
if *subscription && subscribe {
return
}
} else if subscribe {
read_receipt_change_mutex_guard.insert(room_id.clone(), true);
}
pin_mut!(update_receiver);
if let Some(client_user_id) = current_user_id() {
if let Some((event_id, receipt)) = timeline.latest_user_read_receipt(&client_user_id).await {
log!("Received own user read receipt: {receipt:?} {event_id:?}");
if let Err(e) = sender.send(TimelineUpdate::OwnUserReadReceipt(receipt)) {
error!("Failed to send own user read receipt update: {e:?}");
}
}
while (update_receiver.next().await).is_some() {
let read_receipt_change = subscribe_to_current_user_read_receipt_changed.clone();
let read_receipt_change = read_receipt_change.lock().await;
let Some(subscribed_to_user_read_receipt) = read_receipt_change.get(&room_id) else { continue; };
if !subscribed_to_user_read_receipt {
break;
}
if let Some((_, receipt)) = timeline.latest_user_read_receipt(&client_user_id).await {
if let Err(e) = sender.send(TimelineUpdate::OwnUserReadReceipt(receipt)) {
error!("Failed to send own user read receipt update: {e:?}");
}
}
}
}
});
}
MatrixRequest::SpawnSSOServer { brand, homeserver_url, identity_provider_id} => {
spawn_sso_server(brand, homeserver_url, identity_provider_id, login_sender.clone()).await;
}
MatrixRequest::ResolveRoomAlias(room_alias) => {
let Some(client) = CLIENT.get() else { continue };
let _resolve_task = Handle::current().spawn(async move {
log!("Sending resolve room alias request for {room_alias}...");
let res = client.resolve_room_alias(&room_alias).await;
log!("Resolved room alias {room_alias} to: {res:?}");
todo!("Send the resolved room alias back to the UI thread somehow.");
});
}
MatrixRequest::FetchAvatar { mxc_uri, on_fetched } => {
let Some(client) = CLIENT.get() else { continue };
let _fetch_task = Handle::current().spawn(async move {
// log!("Sending fetch avatar request for {mxc_uri:?}...");
let media_request = MediaRequest {
source: MediaSource::Plain(mxc_uri.clone()),
format: MEDIA_THUMBNAIL_FORMAT.into(),
};
let res = client.media().get_media_content(&media_request, true).await;
// log!("Fetched avatar for {mxc_uri:?}, succeeded? {}", res.is_ok());
on_fetched(AvatarUpdate { mxc_uri, avatar_data: res.map(|v| v.into()) });
});
}
MatrixRequest::FetchMedia { media_request, on_fetched, destination, update_sender } => {
let Some(client) = CLIENT.get() else { continue };
let media = client.media();
let _fetch_task = Handle::current().spawn(async move {
// log!("Sending fetch media request for {media_request:?}...");
let res = media.get_media_content(&media_request, true).await;
on_fetched(&destination, media_request, res, update_sender);
});
}
MatrixRequest::SendMessage { room_id, message, replied_to } => {
let timeline = {
let all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get(&room_id) else {
log!("BUG: room info not found for send message request {room_id}");
continue;
};
room_info.timeline.clone()
};
// Spawn a new async task that will send the actual message.
let _send_message_task = Handle::current().spawn(async move {
log!("Sending message to room {room_id}: {message:?}...");
if let Some(replied_to_info) = replied_to {
match timeline.send_reply(message.into(), replied_to_info, ForwardThread::Yes).await {
Ok(_send_handle) => log!("Sent reply message to room {room_id}."),
Err(_e) => {
error!("Failed to send reply message to room {room_id}: {_e:?}");
enqueue_popup_notification(format!("Failed to send reply: {_e}"));
}
}
} else {
match timeline.send(message.into()).await {
Ok(_send_handle) => log!("Sent message to room {room_id}."),
Err(_e) => {
error!("Failed to send message to room {room_id}: {_e:?}");
enqueue_popup_notification(format!("Failed to send message: {_e}"));
}
}
}
SignalToUI::set_ui_signal();
});
}
MatrixRequest::ReadReceipt { room_id, event_id } => {
let timeline = {
let all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get(&room_id) else {
log!("BUG: room info not found when sending read receipt, room {room_id}, {event_id}");
continue;
};
room_info.timeline.clone()
};
let _send_rr_task = Handle::current().spawn(async move {
match timeline.send_single_receipt(ReceiptType::Read, ReceiptThread::Unthreaded, event_id.clone()).await {
Ok(sent) => log!("{} read receipt to room {room_id} for event {event_id}", if sent { "Sent" } else { "Already sent" }),
Err(_e) => error!("Failed to send read receipt to room {room_id} for event {event_id}; error: {_e:?}"),
}
// Also update the number of unread messages in the room.
enqueue_rooms_list_update(RoomsListUpdate::UpdateNumUnreadMessages {
room_id: room_id.clone(),
count: UnreadMessageCount::Known(timeline.room().num_unread_messages())
});
});
},
MatrixRequest::FullyReadReceipt { room_id, event_id, .. } => {
let timeline = {
let all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get(&room_id) else {
log!("BUG: room info not found when sending fully read receipt, room {room_id}, {event_id}");
continue;
};
room_info.timeline.clone()
};
let _send_frr_task = Handle::current().spawn(async move {
match timeline.send_single_receipt(ReceiptType::FullyRead, ReceiptThread::Unthreaded, event_id.clone()).await {
Ok(sent) => log!("{} fully read receipt to room {room_id} for event {event_id}",
if sent { "Sent" } else { "Already sent" }
),
Err(_e) => error!("Failed to send fully read receipt to room {room_id} for event {event_id}; error: {_e:?}"),
}
// Also update the number of unread messages in the room.
enqueue_rooms_list_update(RoomsListUpdate::UpdateNumUnreadMessages {
room_id: room_id.clone(),
count: UnreadMessageCount::Known(timeline.room().num_unread_messages())
});
});
},
MatrixRequest::CheckCanUserSendMessage { room_id } => {
let (timeline, sender) = {
let all_room_info = ALL_ROOM_INFO.lock().unwrap();
let Some(room_info) = all_room_info.get(&room_id) else {
log!("BUG: room info not found for check-can-user-send-message request, room {room_id}");
continue;
};
(room_info.timeline.clone(), room_info.timeline_update_sender.clone())
};
let Some(user_id) = current_user_id() else { continue };
let _check_can_user_send_message_task = Handle::current().spawn(async move {
let can_user_send_message = timeline.room().can_user_send_message(
&user_id,
MessageLikeEventType::Message
)
.await
.unwrap_or(true);
if let Err(e) = sender.send(TimelineUpdate::CanUserSendMessage(can_user_send_message)) {
error!("Failed to send the result of whether the user can send messages: {e}")
}
});
}
}
}
error!("async_worker task ended unexpectedly");
bail!("async_worker task ended unexpectedly")
}
/// The single global Tokio runtime that is used by all async tasks.
static TOKIO_RUNTIME: OnceLock<tokio::runtime::Runtime> = OnceLock::new();
/// The sender used by [`submit_async_request`] to send requests to the async worker thread.
/// Currently there is only one, but it can be cloned if we need more concurrent senders.
static REQUEST_SENDER: OnceLock<UnboundedSender<MatrixRequest>> = OnceLock::new();
pub fn start_matrix_tokio() -> Result<()> {
// Create a Tokio runtime, and save it in a static variable to ensure it isn't dropped.
let rt = TOKIO_RUNTIME.get_or_init(|| tokio::runtime::Runtime::new().unwrap());
// Create a channel to be used between UI thread(s) and the async worker thread.
let (sender, receiver) = tokio::sync::mpsc::unbounded_channel::<MatrixRequest>();
REQUEST_SENDER.set(sender).expect("BUG: REQUEST_SENDER already set!");
let (login_sender, login_receiver) = tokio::sync::mpsc::channel(1);
// Start a high-level async task that will start and monitor all other tasks.
let _monitor = rt.spawn(async move {
// Spawn the actual async worker thread.
let mut worker_join_handle = rt.spawn(async_worker(receiver, login_sender));
// Start the main loop that drives the Matrix client SDK.
let mut main_loop_join_handle = rt.spawn(async_main_loop(login_receiver));
#[allow(clippy::never_loop)] // unsure if needed, just following tokio's examples.
loop {
tokio::select! {
result = &mut main_loop_join_handle => {
match result {
Ok(Ok(())) => {
error!("BUG: main async loop task ended unexpectedly!");
}
Ok(Err(e)) => {
error!("Error: main async loop task ended:\n\t{e:?}");
rooms_list::enqueue_rooms_list_update(RoomsListUpdate::Status {
status: e.to_string(),
});
enqueue_popup_notification(format!("Rooms list update error: {e}"));
},
Err(e) => {
error!("BUG: failed to join main async loop task: {e:?}");
}
}
break;
}
result = &mut worker_join_handle => {
match result {
Ok(Ok(())) => {
error!("BUG: async worker task ended unexpectedly!");
}
Ok(Err(e)) => {
error!("Error: async worker task ended:\n\t{e:?}");
rooms_list::enqueue_rooms_list_update(RoomsListUpdate::Status {
status: e.to_string(),
});
enqueue_popup_notification(format!("Async worker error: {e}"));
},
Err(e) => {
error!("BUG: failed to join async worker task: {e:?}");
}
}
break;
}
}
}