# FileAppender Class

Module: import { FileAppender } from "@tsed/logger"
Source: packages/logger/src/appenders/components/FileAppender.ts

# Overview

```typescript
class FileAppender extends BaseAppender {
  reopen(): Promise<void>;

  shutdown(): Promise<any>;

  write(loggingEvent: LogEvent): void;

  /**
   * @param file
   * @param fileSize
   * @param numFiles
   * @param options
   * @returns {streams.RollingFileStream}
   */
}
```

# Description


The file appender writes log events to a file. It supports an optional maximum file size and keeps a configurable number of backups. When using the file appender, you should also call ts-log-debug.shutdown() when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the streamroller library, streamroller is included as a dependency of ts-log-debug, so you do not need to install it yourself.
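
As a minimal sketch of that shutdown step (the exact shutdown entry point is an assumption here; the FileAppender itself exposes the shutdown(): Promise<any> method listed in the Overview):

```typescript
import {Logger} from "ts-log-debug";

const logger = new Logger("loggerName");

logger.appenders.set("log-file", {
    type: "file",
    filename: "all-the-logs.log"
});

// Flush pending asynchronous writes before the process exits.
// Assumption: the logger forwards shutdown() to its appenders; if your version
// does not, call shutdown() on the file appender instance directly.
async function stop(): Promise<void> {
    await (logger as any).shutdown();
    process.exit(0);
}

process.on("SIGINT", stop);
process.on("SIGTERM", stop);
```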

# Configuration

  • type - "file"
  • filename - string - the path of the file where you want your logs written.
  • maxLogSize - integer (optional) - the maximum size (in bytes) for the log file. If not specified, then no log rolling will happen.
  • backups - integer (optional, default value = 5) - the number of old log files to keep during log rolling.
  • layout - (optional, defaults to basic layout) - see layouts

Any other configuration parameters will be passed to the underlying streamroller implementation (see also Node.js core file streams); a configuration sketch follows this list:

  • encoding - string (default "utf-8")
  • mode - integer (default 0644)
  • flags - string (default 'a')
  • compress - boolean (default false) - compress the backup files during rolling (backup files will have a .gz extension)
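
For example, a sketch of a configuration that forwards some of these options to streamroller. The values are illustrative, and it is assumed here (as in the other examples on this page) that the extra keys sit at the top level of the appender configuration:

```typescript
import {Logger} from "ts-log-debug";

const logger = new Logger("loggerName");

// Keys beyond type/filename/maxLogSize/backups/layout (encoding, mode, flags,
// compress) are passed through to streamroller.
logger.appenders.set("log-file-options", {
    type: "file",
    filename: "all-the-logs.log",
    maxLogSize: 10485760,
    backups: 3,
    encoding: "utf-8",
    mode: 0o644,
    flags: "a",
    compress: true
});
```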

# Example

```typescript
import {Logger} from "ts-log-debug";

const logger = new Logger("loggerName");

logger.appenders.set("log-file", {
    type: "file",
    filename: "all-the-logs.log"
});
logger.debug('I will be logged in all-the-logs.log');
```

This example will result in a single log file (all-the-logs.log) containing the log messages.

# Example with log rolling (and compressed backups)

```typescript
import {Logger} from "ts-log-debug";

const logger = new Logger("loggerName");

logger.appenders.set("log-file2", {
    type: "file",
    filename: "all-the-logs.log",
    maxLogSize: 10485760,
    backups: 3,
    compress: true
});
logger.debug('I will be logged in all-the-logs.log');
```

This will result in one current log file (all-the-logs.log). When that file reaches 10 MB in size, it will be renamed and compressed to all-the-logs.log.1.gz, and a new all-the-logs.log will be opened. When all-the-logs.log reaches 10 MB again, all-the-logs.log.1.gz will be renamed to all-the-logs.log.2.gz, and so on.

# Example with date rolling

```typescript
import { Logger } from 'ts-log-debug';

export const logger = new Logger('Log Example');

logger.appenders.set('file', {
  type: 'file',
  filename: `${__dirname}/../logs/myfile.log`,
  pattern: '.yyyy-MM-dd'
});
```

# Members

reopen(): Promise<void>;

shutdown(): Promise<any>;

write(loggingEvent: LogEvent): void;
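
For illustration, a hedged sketch of using reopen() together with an external log-rotation tool such as logrotate; the fileAppender variable is hypothetical and stands for however your application holds on to the appender instance:

```typescript
import {FileAppender} from "@tsed/logger";

// Hypothetical: an already-created FileAppender instance from your own wiring.
declare const fileAppender: FileAppender;

// After an external tool renames or moves the current log file, ask the
// appender to reopen its stream at the configured path.
process.on("SIGHUP", () => {
    fileAppender.reopen().catch((err) => console.error("reopen failed", err));
});
```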
