// relatica/lib/riverpod_controllers/entry_tree_item_services.dart

import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:logging/logging.dart';
import 'package:path/path.dart' as p;
import 'package:result_monad/result_monad.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
import 'package:stack_trace/stack_trace.dart';
import '../models/auth/profile.dart';
import '../models/entry_tree_item.dart';
import '../models/exec_error.dart';
import '../models/flattened_tree_item.dart';
import '../models/image_entry.dart';
import '../models/media_attachment_uploads/media_upload_attachment.dart';
import '../models/media_attachment_uploads/new_entry_media_items.dart';
import '../models/networking/paging_data.dart';
import '../models/timeline_entry.dart';
import '../models/timeline_identifiers.dart';
import '../models/visibility.dart';
import '../riverpod_controllers/networking/friendica_remote_file_client_services.dart';
import '../riverpod_controllers/networking/friendica_timelines_client_services.dart';
import '../utils/entry_tree_item_flattening.dart';
import '../utils/media_upload_attachment_helper.dart';
import 'networking/friendica_statuses_client_services.dart';
import 'timeline_entry_services.dart';
import 'timeline_services.dart';
part 'entry_tree_item_services.g.dart';
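
/// Per-profile caches, kept alive for the lifetime of the app:
/// - [_parentPostIds] maps a comment id to the id of its top-level post.
/// - [_postNodes] holds the parent/child node tree for each top-level post.
/// - [_entryTreeItems] holds the materialized [EntryTreeItem] for each post id.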
@Riverpod(keepAlive: true)
Map<String, String> _parentPostIds(Ref ref, Profile profile) {
  return {};
}

@Riverpod(keepAlive: true)
Map<String, _Node> _postNodes(Ref ref, Profile profile) {
  return {};
}

@Riverpod(keepAlive: true)
Map<String, EntryTreeItem> _entryTreeItems(Ref ref, Profile profile) => {};

final _pteLogger = Logger('PostTreeEntryByIdProvider');
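
/// Resolves the [EntryTreeItem] for [id], whether [id] refers to a top-level
/// post or to a comment. Comment ids are mapped to their parent post via
/// [_parentPostIds] before delegating to [EntryTreeManager].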
@riverpod
Result<EntryTreeItem, ExecError> postTreeEntryById(
    Ref ref, Profile profile, String id) {
  _pteLogger.finest('Building for $id for $profile');
  final isPostNode = ref.watch(_postNodesProvider(profile)).containsKey(id);
  _pteLogger.finest('$id ${isPostNode ? "is a post" : "is a comment"}');
  final postId =
      isPostNode ? id : ref.watch(_parentPostIdsProvider(profile))[id];
  if (postId == null) {
    _pteLogger.finest('No post entry found for $id for $profile');
    return buildErrorResult(
        type: ErrorType.notFound, message: 'No post entry found for id: $id');
  }
  final entry = ref.watch(entryTreeManagerProvider(profile, postId));
  _pteLogger.finest('Result from ETM: $entry');
  return entry;
}

final _etmLogger = Logger('EntryTreeManagerProvider');
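
/// Kept-alive notifier exposing the cached [EntryTreeItem] for a single post
/// id, with helpers to upsert, remove, and flatten that entry.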
@Riverpod(keepAlive: true)
class EntryTreeManager extends _$EntryTreeManager {
  var entryId = '';

  @override
  Result<EntryTreeItem, ExecError> build(Profile profile, String id) {
    _etmLogger.finest('Building for $id for $profile');
    entryId = id;
    final entries = ref.watch(_entryTreeItemsProvider(profile));
    final entry = entries[id];
    if (entry == null) {
      _etmLogger.finest('EntryTreeItem for $id for $profile not found');
      return buildErrorResult(
        type: ErrorType.notFound,
        message: '$id not found',
      );
    }
    _etmLogger.finest('Return entry for $id for $profile');
    return Result.ok(entry);
  }
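
  /// Stores [entry] in the per-profile cache and publishes it as the new
  /// state. Fails if the entry's id does not match this provider's [id].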
  Result<EntryTreeItem, ExecError> upsert(EntryTreeItem entry) {
    _etmLogger.finest('Upserting entry for $id for $profile');
    if (entry.id != entryId) {
      return buildErrorResult(
        type: ErrorType.argumentError,
        message:
            'Trying to add an entry to a provider that does not match the id: $id',
      );
    } else {
      ref.read(_entryTreeItemsProvider(profile))[entryId] = entry;
    }
    if (state.isFailure || entry != state.value) {
      state = Result.ok(entry);
    }
    return state;
  }

  void remove() {
    _etmLogger.finest('Removing for $entryId for $profile');
    ref.read(_entryTreeItemsProvider(profile)).remove(entryId);
    ref.invalidateSelf();
  }
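
  /// Flattens the entry's reply tree into a linear list (see
  /// [FlatteningExtensions]) for rendering, or returns the current error
  /// state if the entry could not be loaded.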
  Result<List<FlattenedTreeItem>, ExecError> flattened({
    int level = FlatteningExtensions.baseLevel,
    bool topLevelOnly = false,
  }) {
    if (state.isFailure) {
      return state.errorCast();
    }
    return Result.ok(
      state.value.flatten(
        level: level,
        topLevelOnly: topLevelOnly,
        profile: profile,
        ref: ref,
      ),
    );
  }
}

final _tluLogger = Logger('TimelineUpdater');
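
/// Notifier that pulls timeline pages and status chains from the server and
/// folds the results into the per-profile post/comment caches above.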
@riverpod
class TimelineUpdater extends _$TimelineUpdater {
  @override
  bool build(Profile profile) {
    return true;
  }
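
  /// Fetches a page of the [type] timeline bounded by [maxId]/[sinceId]
  /// (either bound is ignored when <= 0) and merges the returned entries
  /// into the post trees, returning the top-level posts that were touched.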
  FutureResult<List<EntryTreeItem>, ExecError> updateTimeline(
      TimelineIdentifiers type, int maxId, int sinceId) async {
    _tluLogger.fine(() => 'Updating timeline');
    final itemsResult = await ref.read(timelineProvider(profile,
        type: type,
        page: PagingData(
          maxId: maxId > 0 ? maxId : null,
          sinceId: sinceId > 0 ? sinceId : null,
        )).future);
    if (itemsResult.isFailure) {
      _tluLogger.severe(
        'Error getting timeline: ${itemsResult.error}',
        Trace.current(),
      );
      return itemsResult.errorCast();
    }
    itemsResult.value.sort((t1, t2) => t1.id.compareTo(t2.id));
    final updatedPosts = await _processNewItems(
      itemsResult.value,
      profile.userId,
    );
    return Result.ok(updatedPosts);
  }
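
  /// Re-fetches a single status and its full conversation context in
  /// parallel, replaces the cached chain, and returns the rebuilt entry.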
  FutureResult<EntryTreeItem, ExecError> refreshStatusChain(String id) async {
    _tluLogger.fine('Refreshing post: $id');
    final postResult =
        ref.read(postOrCommentProvider(profile, id, fullContext: false).future);
    final contextResult =
        ref.read(postOrCommentProvider(profile, id, fullContext: true).future);
    final results = await Future.wait([postResult, contextResult]);
    // Only treat this as a failure if both the status and the context fetches
    // failed; a partial result is still worth processing.
    final allFailed = results.map((r) => r.isFailure).reduce((e, s) => e && s);
    final entries = results
        .map((r) => r.getValueOrElse(() => []))
        .expand((i) => i)
        .toList();
    if (entries.isNotEmpty) {
      _cleanupEntriesForId(id);
      await _processNewItems(entries, profile.userId);
    }
    if (allFailed) {
      return Result.error(results.firstWhere((r) => r.isFailure).error);
    } else {
      final resultFromProvider =
          ref.read(postTreeEntryByIdProvider(profile, id));
      return resultFromProvider;
    }
  }
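
  /// Deletes the status on the server and, on success, evicts it from the
  /// local entry and timeline caches.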
  FutureResult<bool, ExecError> deleteEntryById(String id) async {
    _tluLogger.finest('Delete entry: $id');
    final result = await ref
        .read(deleteStatusEntryByIdProvider(profile, id).future)
        .withResult((_) {
      ref.read(entryTreeManagerProvider(profile, id).notifier).remove();
      ref.read(timelineEntryManagerProvider(profile, id).notifier).remove();
      _cleanupEntriesForId(id);
    });
    return result.execErrorCast();
  }
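
  /// Drops [id] from the comment-to-post map and the node trees, and
  /// invalidates the parent's cached timeline entry so watchers rebuild.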
  void _cleanupEntriesForId(String id) {
    final parentPostIds = ref.read(_parentPostIdsProvider(profile));
    final postNodes = ref.read(_postNodesProvider(profile));
    if (parentPostIds.containsKey(id)) {
      final parentPostId = parentPostIds.remove(id);
      final parentPostNode = postNodes[parentPostId];
      ref.invalidate(timelineEntryManagerProvider(profile, parentPostId!));
      parentPostNode?.removeChildById(id);
    }
    if (postNodes.containsKey(id)) {
      postNodes.remove(id);
    }
  }
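
  /// Upserts the fetched [items], resolves any orphaned comments by fetching
  /// their full context, then links every entry into its post's node tree.
  /// Returns the [EntryTreeItem]s for the top-level posts that were updated.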
  Future<List<EntryTreeItem>> _processNewItems(
    List<TimelineEntry> items,
    String currentId,
  ) async {
    _tluLogger.fine('Processing new items: ${items.map((e) => e.id).toList()}');
    items.sort((i1, i2) => int.parse(i1.id).compareTo(int.parse(i2.id)));
    final allSeenItems = [...items];
    for (final item in items) {
      _tluLogger.finest('Upserting entry for ${item.id}');
      ref
          .read(timelineEntryManagerProvider(profile, item.id).notifier)
          .upsert(item);
    }
    final orphans = <TimelineEntry>[];
    for (final item in items) {
      if (item.parentId.isEmpty) {
        continue;
      }
      ref.read(timelineEntryManagerProvider(profile, item.parentId)).match(
          onSuccess: (parent) {
        if (parent.parentId.isEmpty) {
          ref.read(_parentPostIdsProvider(profile))[item.id] = parent.id;
        }
      }, onError: (_) {
        orphans.add(item);
      });
    }
    for (final o in orphans) {
      await ref
          .read(postOrCommentProvider(profile, o.id, fullContext: true).future)
          .andThenSuccessAsync((items) async {
        final parentPostId = items.firstWhere((e) => e.parentId.isEmpty).id;
        ref.read(_parentPostIdsProvider(profile))[o.id] = parentPostId;
        allSeenItems.addAll(items);
        for (final item in items) {
          ref
              .read(timelineEntryManagerProvider(profile, item.id).notifier)
              .upsert(item);
          ref.read(_parentPostIdsProvider(profile))[item.id] = parentPostId;
        }
      });
    }
    allSeenItems.sort((i1, i2) {
      if (i1.parentId.isEmpty && i2.parentId.isNotEmpty) {
        return -1;
      }
      if (i2.parentId.isEmpty && i1.parentId.isNotEmpty) {
        return 1;
      }
      return int.parse(i1.id).compareTo(int.parse(i2.id));
    });
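
    // Link each entry into its post's node tree. Entries whose parents have
    // not been linked yet are retried on the next pass of the while loop; if
    // a pass makes no progress the loop logs the leftovers and bails out.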
    final postNodesToReturn = <_Node>{};
    var lastCount = 0;
    final postNodes = ref.read(_postNodesProvider(profile));
    final parentPostIds = ref.read(_parentPostIdsProvider(profile));
    while (allSeenItems.isNotEmpty) {
      final seenItemsCopy = [...allSeenItems];
      for (final item in seenItemsCopy) {
        if (item.parentId.isEmpty) {
          final postNode = postNodes.putIfAbsent(item.id, () => _Node(item.id));
          postNodesToReturn.add(postNode);
          allSeenItems.remove(item);
        } else {
          final parentParentPostId = postNodes.containsKey(item.parentId)
              ? item.parentId
              : parentPostIds[item.parentId];
          if (postNodes[parentParentPostId] == null) {
            _tluLogger.severe(
              'Error finding parent ${item.parentId} for entry ${item.id}',
              Trace.current(),
            );
            continue;
          }
          final parentPostNode = postNodes[parentParentPostId]!;
          postNodesToReturn.add(parentPostNode);
          parentPostIds[item.id] = parentPostNode.id;
          if (parentPostNode.getChildById(item.id) == null) {
            final newNode = _Node(item.id);
            final injectionNode = parentPostNode.id == item.parentId
                ? parentPostNode
                : parentPostNode.getChildById(item.parentId);
            if (injectionNode == null) {
              continue;
            } else {
              injectionNode.addChild(newNode);
            }
          }
          allSeenItems.remove(item);
        }
      }
      if (allSeenItems.isNotEmpty && allSeenItems.length == lastCount) {
        _tluLogger.severe(
          'Had ${allSeenItems.length} items left over after all iterations',
          Trace.current(),
        );
        break;
      }
      lastCount = allSeenItems.length;
    }
    final updatedPosts = postNodesToReturn
        .map((node) => _nodeToTreeItem(node, currentId))
        .toList();
    _tluLogger.finest('Completed processing new items');
    return updatedPosts;
  }
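
  /// Recursively converts a [_Node] and its children into [EntryTreeItem]s,
  /// upserting each one into [EntryTreeManager], and returns the item for
  /// [node] itself.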
  EntryTreeItem _nodeToTreeItem(_Node node, String currentId) {
    final childrenEntries = <String>[];
    for (final c in node.children) {
      childrenEntries.add(c.id);
      // Recurse so the child entries get upserted as well.
      _nodeToTreeItem(c, currentId);
    }
    final entryId = node.id;
    final isMine = ref
            .read(timelineEntryManagerProvider(profile, entryId))
            .fold(onSuccess: (t) => t.authorId, onError: (_) => '') ==
        currentId;
    final rval = EntryTreeItem(
      entryId,
      isMine: isMine,
      initialChildren: childrenEntries,
    );
    ref.read(entryTreeManagerProvider(profile, node.id).notifier).upsert(rval);
    return rval;
  }
}

final _swLogger = Logger('StatusWriter');
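
/// Notifier that creates and edits statuses, handling media uploads and
/// refreshing the affected post chains afterwards.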
@riverpod
class StatusWriter extends _$StatusWriter {
  @override
  bool build(Profile profile) {
    return true;
  }
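
  /// Creates a new status (optionally as a reply), uploading any new media
  /// attachments first, then refreshes the reply chain and the personal
  /// timelines so the new entry shows up locally.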
  FutureResult<TimelineEntry, ExecError> createNewStatus(
    Profile profile,
    String text, {
    String spoilerText = '',
    String inReplyToId = '',
    required NewEntryMediaItems mediaItems,
    required List<ImageEntry> existingMediaItems,
    required Visibility visibility,
  }) async {
    _swLogger.info('Creating new post: $text');
    final mediaIds = existingMediaItems.map((m) => m.scales.first.id).toList();
    for (final item in mediaItems.attachments) {
      if (item.isExistingServerItem) {
        continue;
      }
      final result = await _uploadMediaItems(
              profile, item, mediaItems.albumName, visibility)
          .withResult((newId) => mediaIds.add(newId));
      if (result.isFailure) {
        return Result.error(ExecError(
            type: ErrorType.localError,
            message: 'Error uploading image: ${result.error}'));
      }
    }
    final result = await ref
        .read(createNewStatusProvider(profile,
                text: text,
                spoilerText: spoilerText,
                inReplyToId: inReplyToId,
                mediaIds: mediaIds,
                visibility: visibility)
            .future)
        .withResultAsync((item) async {
          ref
              .read(timelineEntryManagerProvider(profile, item.id).notifier)
              .upsert(item);
        })
        .withResultAsync((item) async {
          if (inReplyToId.isNotEmpty) {
            late final String rootPostId;
            if (ref
                .read(_postNodesProvider(profile))
                .containsKey(inReplyToId)) {
              rootPostId = inReplyToId;
            } else {
              rootPostId =
                  ref.read(_parentPostIdsProvider(profile))[inReplyToId]!;
            }
            await ref
                .read(timelineUpdaterProvider(profile).notifier)
                .refreshStatusChain(rootPostId);
          }
        })
        .withResultAsync((item) async {
          await ref
              .read(timelineMaintainerProvider(profile).notifier)
              .loadNewerForPersonalTimelines();
        })
        .withResult((status) => _swLogger.finest('${status.id} status created'))
        .withError((error) => _swLogger.finest('Error creating post: $error'));
    return result.execErrorCast();
  }
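
  /// Edits an existing status, uploading any newly added media attachments
  /// first, then refreshes the reply chain the status belongs to.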
  FutureResult<TimelineEntry, ExecError> editStatus(
    Profile profile,
    String statusId,
    String text, {
    String spoilerText = '',
    required NewEntryMediaItems mediaItems,
    required List<ImageEntry> existingMediaItems,
    required Visibility newMediaItemVisibility,
  }) async {
    _swLogger.info('Editing post: $text');
    final mediaIds = existingMediaItems
        .map((m) => m.scales.isEmpty ? m.id : m.scales.first.id)
        .toList();
    for (final item in mediaItems.attachments) {
      if (item.isExistingServerItem) {
        continue;
      }
      final result = await _uploadMediaItems(
              profile, item, mediaItems.albumName, newMediaItemVisibility)
          .withResult((newId) => mediaIds.add(newId));
      if (result.isFailure) {
        return Result.error(ExecError(
            type: ErrorType.localError,
            message: 'Error uploading image: ${result.error}'));
      }
    }
    final result = await ref
        .read(editStatusProvider(profile,
                id: statusId,
                text: text,
                spoilerText: spoilerText,
                mediaIds: mediaIds)
            .future)
        // These callbacks are async, so use withResultAsync (as in
        // createNewStatus) to make sure they complete before returning.
        .withResultAsync((item) async {
          ref
              .read(timelineEntryManagerProvider(profile, item.id).notifier)
              .upsert(item);
        })
        .withResultAsync((item) async {
          final inReplyToId = item.parentId;
          if (inReplyToId.isNotEmpty) {
            late final String rootPostId;
            if (ref
                .read(_postNodesProvider(profile))
                .containsKey(inReplyToId)) {
              rootPostId = inReplyToId;
            } else {
              rootPostId =
                  ref.read(_parentPostIdsProvider(profile))[inReplyToId]!;
            }
            await ref
                .read(timelineUpdaterProvider(profile).notifier)
                .refreshStatusChain(rootPostId);
          }
        })
        .withResult((status) => _swLogger.finest('${status.id} status updated'))
        .withError((error) => _swLogger.finest('Error editing post: $error'));
    return result.execErrorCast();
  }
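
  /// Uploads a single local media attachment, deriving the remote filename
  /// from the local path when none was provided, and returns the id of the
  /// uploaded media's first scale, which the status calls use as a media id.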
  FutureResult<String, ExecError> _uploadMediaItems(
    Profile profile,
    MediaUploadAttachment item,
    String albumName,
    Visibility visibility,
  ) async {
    final String extension = p.extension(item.localFilePath);
    late final String filename;
    if (item.remoteFilename.isEmpty) {
      filename = p.basename(item.localFilePath);
    } else {
      if (item.remoteFilename.toLowerCase().endsWith(extension.toLowerCase())) {
        filename = item.remoteFilename;
      } else {
        filename = "${item.remoteFilename}$extension";
      }
    }
    final uploadResult =
        await MediaUploadAttachmentHelper.getUploadableImageBytes(
      item.localFilePath,
    )
            .andThenAsync((imageBytes) async =>
                await ref.read(uploadFileAsAttachmentProvider(
                  profile,
                  bytes: imageBytes,
                  album: albumName,
                  description: item.description,
                  fileName: filename,
                  visibility: visibility,
                ).future))
            .transform((v) => v.scales.first.id);
    return uploadResult.execErrorCast();
  }
}

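/// Internal tree node keyed by entry id; children are stored by id so a
/// comment can be located anywhere within a post's reply tree.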
class _Node {
  final String id;
  final _children = <String, _Node>{};

  List<_Node> get children => _children.values.toList();

  _Node(this.id, {Map<String, _Node>? initialChildren}) {
    if (initialChildren != null) {
      _children.addAll(initialChildren);
    }
  }

  void addChild(_Node node) {
    _children[node.id] = node;
  }

  /// Removes the child with [id] anywhere in this subtree.
  void removeChildById(String id) {
    if (_children.containsKey(id)) {
      _children.remove(id);
    }
    for (final c in _children.values) {
      c.removeChildById(id);
    }
  }

  /// Depth-first search for the node with [id] anywhere in this subtree.
  _Node? getChildById(String id) {
    if (_children.containsKey(id)) {
      return _children[id]!;
    }
    for (final c in _children.values) {
      final result = c.getChildById(id);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is _Node && runtimeType == other.runtimeType && id == other.id;

  @override
  int get hashCode => id.hashCode;
}