refactored internal documentation and simplified comments across chat BLoCs, file viewer, and navigation components

This commit is contained in:
2026-05-10 17:01:50 +02:00
parent a0bc46f522
commit 1a11b9ac60
8 changed files with 68 additions and 185 deletions
+17 -42
View File
@@ -28,18 +28,11 @@ class ChatBloc
// Highest message id merged so far; advanced by the long-poll loop when a
// response carries a newer maxId (see `if (maxId > _lastKnownMessageId)`).
int _lastKnownMessageId = 0;
// Whether the app is in the foreground; `_startLongPoll` refuses to start
// while false. NOTE(review): presumably toggled by an app-lifecycle
// observer outside this view — confirm.
bool _appResumed = true;
/// Distinguishes "the bloc tracks a chat the user has open" from "the
/// bloc remembers the last opened chat". App-resume only refreshes when
/// true — otherwise we'd silently mark a long-since-left chat as read
/// on the server. Can't reuse `currentToken` for this signal because
/// clearing it on leaveChat raced with setToken-from-didPopNext when
/// popping a stacked chat.
/// True only while a ChatView is mounted. Can't reuse `currentToken` —
/// clearing it on leaveChat races with setToken from didPopNext when
/// popping a stacked chat, causing spurious server read-markers on resume.
bool _chatViewActive = false;
/// True only while a ChatView is actually mounted and tracking its room.
/// Read by the notification controller to decide whether an incoming push
/// belongs to the chat the user is currently looking at — `currentToken`
/// alone would yield false-positives for the last opened chat.
bool get hasOpenChat {
  // Mirrors _chatViewActive: true only while a ChatView is mounted.
  return _chatViewActive;
}
// Timestamp of the most recent setToken call; epoch-zero sentinel means
// "never set". NOTE(review): consumers not visible in this chunk — confirm
// it is used for debounce/staleness checks.
DateTime _lastTokenSet = DateTime.fromMillisecondsSinceEpoch(0);
@@ -100,18 +93,15 @@ class ChatBloc
add(Emit((s) => s.copyWith(referenceMessageId: messageId)));
}
/// Token-aware: only acts when the bloc still points at [fromToken].
/// When popping a stacked chat (notification opened B over A), A's
/// didPopNext has already run setToken(A) by the time B's dispose
/// fires — at that point currentToken is A and we must leave it alone.
/// No-op when the bloc has already moved on to a different token: when
/// popping a stacked chat (B over A), A's didPopNext runs setToken(A)
/// before B's dispose fires.
void leaveChat(String fromToken) {
  // Only tear down when the bloc still tracks [fromToken]; a stale dispose
  // (stacked-chat pop) must leave the restored chat's state untouched.
  final tracked = innerState?.currentToken ?? '';
  if (tracked == fromToken) {
    _chatViewActive = false;
    _stopLongPoll();
  }
}
/// Fire-and-forget server-side read-marker. Exposed so view-side
/// callers (long-press menu, ChatView dispose) hit the same path.
Future<void> sendServerReadMarker(String token, int messageId) async {
try {
await SetReadMarker(
@@ -137,10 +127,9 @@ class ChatBloc
if (token.isNotEmpty && _chatViewActive) refresh();
}
/// Defer _loadChat by one microtask so the Bloc worker processes the
/// preceding Emit/RefetchStarted before any cache/network callback
/// fires — otherwise a quick cache hit can run with the previous
/// token in state, fail stillCurrent(), and never emit a DataGathered.
/// Microtask hop so the Bloc worker drains the preceding Emit before
/// any cache callback fires — a quick cache hit otherwise runs with
/// the previous token in state and fails stillCurrent().
void _scheduleLoad(String token) {
Future<void>.microtask(() {
if (isClosed) return;
@@ -164,20 +153,15 @@ class ChatBloc
token: token,
onCacheData: (data) {
if (!stillCurrent()) return;
// Only paint cache when the state is empty — restoring a stale
// disk snapshot over already-merged long-poll data would visibly
// drop those messages until the network call resolves.
// Skip cache paint over already-merged long-poll data — would
// visibly drop those messages until the network call resolves.
if (innerState?.chatResponse != null) return;
add(Emit((s) => s.copyWith(chatResponse: data)));
},
onNetworkData: (data) {
// Server-side mark runs unconditionally with the freshly-fetched
// maxId — skipping it on stillCurrent==false would leave the
// server cursor wherever a quick navigation away left it. The
// cache check below avoids a redundant POST when the long-poll
// (setReadMarker=on) or a previous open already moved the cursor
// to this exact id; without it every chat-open did one extra
// round-trip even when there was nothing to mark.
// Mark runs even if no longer current — otherwise a quick
// navigation away leaves the server cursor stale. Cache check
// skips the POST when the cursor is already at maxId.
final maxId = _maxMessageId(data);
if (maxId > 0) {
final cached = _chatListBloc?.lastReadMessageFor(token);
@@ -210,10 +194,6 @@ class ChatBloc
}
}
// ---------------------------------------------------------------------------
// Long-poll loop
// ---------------------------------------------------------------------------
void _startLongPoll(String token) {
if (!_appResumed) return;
if (_pollingToken == token) return;
@@ -253,8 +233,7 @@ class ChatBloc
_applyChatResponse(response);
final maxId = _maxMessageId(response);
if (maxId > _lastKnownMessageId) _lastKnownMessageId = maxId;
// Long-poll's setReadMarker=on already moved the server cursor;
// mirror locally.
// Long-poll's setReadMarker=on moved the server cursor; mirror locally.
final preview = _pickDisplayMessage(response);
if (preview != null) {
_chatListBloc?.applyIncomingMessage(token, preview);
@@ -270,10 +249,7 @@ class ChatBloc
}
}
/// Merges [incoming] into the existing chatResponse and emits as a
/// fresh fetch. Dedups by id (newer wins, so server edits/deletes
/// propagate). Shared by initial-load and long-poll so neither wipes
/// messages the other already committed.
/// Dedups by id with newer-wins so server edits/deletes propagate.
void _applyChatResponse(GetChatResponse incoming) {
final current = innerState?.chatResponse;
if (current == null) {
@@ -301,8 +277,7 @@ class ChatBloc
return max;
}
/// Highest-id message worth showing as the room preview — comments
/// and voice messages, matching what the server picks for `lastMessage`.
/// Mirrors the server's own `lastMessage` selection (comments + voice only).
GetChatResponseObject? _pickDisplayMessage(GetChatResponse response) {
GetChatResponseObject? best;
for (final m in response.data) {