immich/mobile/lib/shared/services/immich_logger.service.dart
Latest commit d500ef77cf by Fynn Petersen-Frey (2023-04-14 08:50:46 -05:00)
feature(mobile): configurable log level (#2248)

* increase maxLogEntries to 500

Co-authored-by: Fynn Petersen-Frey <zoodyy@users.noreply.github.com>

126 lines · 3.8 KiB · Dart

import 'dart:async';
import 'dart:io';
import 'package:flutter/widgets.dart';
import 'package:immich_mobile/shared/models/logger_message.model.dart';
import 'package:immich_mobile/shared/models/store.dart';
import 'package:isar/isar.dart';
import 'package:logging/logging.dart';
import 'package:path_provider/path_provider.dart';
import 'package:share_plus/share_plus.dart';

/// [ImmichLogger] is a custom logger built on top of the [logging] package.
/// Logs are written to the database and to the console via the `debugPrint`
/// method.
///
/// Old log entries are deleted once the count exceeds the `maxLogEntries`
/// (default 500) property of the class.
///
/// Logs can be shared by calling the `shareLogs` method, which generates a
/// CSV file and opens a share dialog.
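///
/// A minimal usage sketch (illustrative only; the logger name "MySubsystem"
/// and the call sites below are assumptions, not taken from the app code):
/// ```dart
/// ImmichLogger(); // first construction attaches the root logger listener
/// Logger("MySubsystem").info("App started");
/// ImmichLogger().level = Level.WARNING; // raise the verbosity threshold
/// ImmichLogger().flush(); // persist buffered messages before shutdown
/// ```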
class ImmichLogger {
  static final ImmichLogger _instance = ImmichLogger._internal();
  final maxLogEntries = 500;
  final Isar _db = Isar.getInstance()!;
  List<LoggerMessage> _msgBuffer = [];
  Timer? _timer;

  factory ImmichLogger() => _instance;

  ImmichLogger._internal() {
    _removeOverflowMessages();
    final int levelId = Store.get(StoreKey.logLevel, 5); // 5 is INFO
    Logger.root.level = Level.LEVELS[levelId];
    Logger.root.onRecord.listen(_writeLogToDatabase);
  }
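
  /// Sets the global log level at runtime.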
  set level(Level level) => Logger.root.level = level;
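
  /// All recorded messages, newest first: buffered (not yet persisted)
  /// messages followed by those already stored in the database.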
  List<LoggerMessage> get messages {
    final inDb =
        _db.loggerMessages.where(sort: Sort.desc).anyId().findAllSync();
    return _msgBuffer.isEmpty ? inDb : _msgBuffer.reversed.toList() + inDb;
  }
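
  /// Deletes the oldest entries from the database once the stored count
  /// exceeds [maxLogEntries].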
  void _removeOverflowMessages() {
    final msgCount = _db.loggerMessages.countSync();
    if (msgCount > maxLogEntries) {
      final numberOfEntryToBeDeleted = msgCount - maxLogEntries;
      _db.writeTxn(
        () => _db.loggerMessages
            .where()
            .limit(numberOfEntryToBeDeleted)
            .deleteAll(),
      );
    }
  }
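
  /// Handler for every [LogRecord] emitted by the [logging] package:
  /// prints the record to the console and queues it for persistence.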
  void _writeLogToDatabase(LogRecord record) {
    debugPrint('[${record.level.name}] [${record.time}] ${record.message}');
    final lm = LoggerMessage(
      message: record.message,
      level: record.level.toLogLevel(),
      createdAt: record.time,
      context1: record.loggerName,
      context2: record.stackTrace?.toString(),
    );
    _msgBuffer.add(lm);

    // delayed batch writing to database: increases performance when logging
    // messages in quick succession and reduces NAND wear
    _timer ??= Timer(const Duration(seconds: 5), _flushBufferToDatabase);
  }
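
  /// Persists all buffered messages in a single transaction and clears the
  /// pending flush timer so the next log record can schedule a new batch.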
  void _flushBufferToDatabase() {
    _timer = null;
    final buffer = _msgBuffer;
    _msgBuffer = [];
    _db.writeTxn(() => _db.loggerMessages.putAll(buffer));
  }
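
  /// Discards all buffered and persisted log messages.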
  void clearLogs() {
    _timer?.cancel();
    _timer = null;
    _msgBuffer.clear();
    _db.writeTxn(() => _db.loggerMessages.clear());
  }
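
  /// Exports all messages to a temporary CSV file and opens the platform
  /// share dialog; the file is deleted after sharing completes.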
  Future<void> shareLogs() async {
    final tempDir = await getTemporaryDirectory();
    final dateTime = DateTime.now().toIso8601String();
    final filePath = '${tempDir.path}/Immich_log_$dateTime.csv';
    final logFile = await File(filePath).create();
    final io = logFile.openWrite();
    try {
      // Write header
      io.write("created_at,level,context,message,stacktrace\n");

      // Write messages
      for (final m in messages) {
        io.write(
          '${m.createdAt},${m.level},"${m.context1 ?? ""}","${m.message}","${m.context2 ?? ""}"\n',
        );
      }
    } finally {
      await io.flush();
      await io.close();
    }

    // Share file
    await Share.shareXFiles(
      [XFile(filePath)],
      subject: "Immich logs $dateTime",
      sharePositionOrigin: Rect.zero,
    ).then(
      (value) => logFile.delete(),
    );
  }

  /// Flush pending log messages to persistent storage
  void flush() {
    if (_timer != null) {
      _timer!.cancel();
      _db.writeTxnSync(() => _db.loggerMessages.putAllSync(_msgBuffer));
    }
  }
}