umbrix/lib/features/log/overview/logs_overview_notifier.dart

import 'dart:async';

import 'package:hiddify/features/log/data/log_data_providers.dart';
import 'package:hiddify/features/log/model/log_entity.dart';
import 'package:hiddify/features/log/model/log_level.dart';
import 'package:hiddify/features/log/overview/logs_overview_state.dart';
import 'package:hiddify/utils/riverpod_utils.dart';
import 'package:hiddify/utils/utils.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
import 'package:rxdart/rxdart.dart';

part 'logs_overview_notifier.g.dart';
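
/// Riverpod notifier backing the logs overview feature.
///
/// It watches the log stream from [logRepositoryProvider], throttles UI
/// updates, and exposes pause/resume, clear, and message/level filtering.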
@riverpod
class LogsOverviewNotifier extends _$LogsOverviewNotifier with AppLogger {
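  /// Builds the initial state and wires up lifecycle hooks: the log
  /// subscription is paused while the provider has no listeners and resumed
  /// when it regains them. `ref.disposeDelay` is a project utility from
  /// riverpod_utils that appears to postpone auto-dispose by 20 seconds.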
  @override
  LogsOverviewState build() {
    ref.disposeDelay(const Duration(seconds: 20));
    state = const LogsOverviewState();
    ref.onDispose(
      () {
        loggy.debug("disposing");
        _listener?.cancel();
        _listener = null;
      },
    );
    ref.onCancel(
      () {
        if (_listener?.isPaused != true) {
          loggy.debug("pausing");
          _listener?.pause();
        }
      },
    );
    ref.onResume(
      () {
        if (!state.paused && (_listener?.isPaused ?? false)) {
          loggy.debug("resuming");
          _listener?.resume();
        }
      },
    );
    _addListeners();
    return const LogsOverviewState();
  }
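
  /// Active subscription to the repository's log stream; null until
  /// [_addListeners] runs and again after the provider is disposed.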
  StreamSubscription? _listener;
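
  /// (Re)subscribes to the repository's log stream. The first `throttle`
  /// appears intended to gate emissions on the paused flag, and `throttleTime`
  /// batches updates to at most one every 250 ms; each event is then folded
  /// into either an [AsyncError] or freshly filtered [AsyncData] state.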
  Future<void> _addListeners() async {
    loggy.debug("adding listeners");
    await _listener?.cancel();
    _listener = ref
        .read(logRepositoryProvider)
        .requireValue
        .watchLogs()
        .throttle(
          (_) => Stream.value(_listener?.isPaused ?? false),
          leading: false,
          trailing: true,
        )
        .throttleTime(
          const Duration(milliseconds: 250),
          leading: false,
          trailing: true,
        )
        .asyncMap(
          (event) async {
            await event.fold(
              (f) {
                _logs = [];
                state = state.copyWith(logs: AsyncError(f, StackTrace.current));
              },
              (a) async {
                _logs = a.reversed;
                state = state.copyWith(logs: AsyncData(await _computeLogs()));
              },
            );
          },
        ).listen((event) {});
  }
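
  /// Latest raw logs as received from the repository, stored reversed
  /// (presumably newest first) and re-filtered on demand by [_computeLogs].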
  Iterable<LogEntity> _logs = [];
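
  /// Debounces text-filter changes so rapid typing does not recompute the
  /// filtered list on every keystroke; [CallbackDebouncer] is a project
  /// utility assumed to run only the last callback after the given delay.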
  final _debouncer = CallbackDebouncer(const Duration(milliseconds: 200));
  LogLevel? _levelFilter;
  String _filter = "";
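
  /// Applies the current message and level filters to [_logs]; entries with an
  /// unknown level are kept even when a level filter is active.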
  Future<List<LogEntity>> _computeLogs() async {
    if (_levelFilter == null && _filter.isEmpty) return _logs.toList();
    return _logs.where((e) {
      return (_filter.isEmpty || e.message.contains(_filter)) &&
          (_levelFilter == null ||
              e.level == null ||
              e.level!.index >= _levelFilter!.index);
    }).toList();
  }
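
  /// Pauses the underlying subscription so no new log batches are processed
  /// until [resume] is called.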
  void pause() {
    loggy.debug("pausing");
    _listener?.pause();
    state = state.copyWith(paused: true);
  }
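
  /// Resumes the subscription; events buffered while paused are then delivered.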
  void resume() {
    loggy.debug("resuming");
    _listener?.resume();
    state = state.copyWith(paused: false);
  }
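
  /// Clears persisted logs via the repository and empties the in-memory list;
  /// failures are only logged.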
  Future<void> clear() async {
    loggy.debug("clearing");
    await ref.read(logRepositoryProvider).requireValue.clearLogs().match(
      (l) {
        loggy.warning("error clearing logs", l);
      },
      (_) {
        _logs = [];
        state = state.copyWith(logs: const AsyncData([]));
      },
    ).run();
  }
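
  /// Updates the free-text filter; recomputation is debounced and only applied
  /// once logs have loaded successfully.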
  void filterMessage(String? filter) {
    _filter = filter ?? '';
    _debouncer(
      () async {
        if (state.logs case AsyncData()) {
          state = state.copyWith(
            filter: _filter,
            logs: AsyncData(await _computeLogs()),
          );
        }
      },
    );
  }
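
  /// Sets the minimum log level and immediately recomputes the filtered list
  /// if logs have loaded.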
  Future<void> filterLevel(LogLevel? level) async {
    _levelFilter = level;
    if (state.logs case AsyncData()) {
      state = state.copyWith(
        levelFilter: _levelFilter,
        logs: AsyncData(await _computeLogs()),
      );
    }
  }
}