repo_name
stringclasses 28
values | pr_number
int64 8
3.71k
| pr_title
stringlengths 3
107
| pr_description
stringlengths 0
60.1k
| author
stringlengths 4
19
| date_created
unknown | date_merged
unknown | previous_commit
stringlengths 40
40
| pr_commit
stringlengths 40
40
| query
stringlengths 5
60.1k
| filepath
stringlengths 7
167
| before_content
stringlengths 0
103M
| after_content
stringlengths 0
103M
| label
int64 -1
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./test/tap/configuration-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const realFS = require('fs');
const modulePath = 'some/path/to/mylog4js.json';
const pathsChecked = [];
let fakeFS = {};
let dependencies;
let fileRead;
test('log4js configure', (batch) => {
batch.beforeEach((done) => {
fileRead = 0;
fakeFS = {
realpath: realFS.realpath, // fs-extra looks for this
ReadStream: realFS.ReadStream, // need to define these, because graceful-fs uses them
WriteStream: realFS.WriteStream,
read: realFS.read,
closeSync: () => {},
config: {
appenders: {
console: {
type: 'console',
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: {
appenders: ['console'],
level: 'INFO',
},
},
},
readdirSync: (dir) => require('fs').readdirSync(dir),
readFileSync: (file, encoding) => {
fileRead += 1;
batch.type(file, 'string');
batch.equal(file, modulePath);
batch.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: (path) => {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: new Date() };
}
throw new Error('no such file');
},
};
dependencies = {
requires: {
fs: fakeFS,
},
};
if (typeof done === 'function') {
done();
}
});
batch.test(
'when configuration file loaded via LOG4JS_CONFIG env variable',
(t) => {
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
const log4js = sandbox.require('../../lib/log4js', dependencies);
log4js.getLogger('test-logger');
t.equal(fileRead, 1, 'should load the specified local config file');
delete process.env.LOG4JS_CONFIG;
t.end();
}
);
batch.test(
'when configuration is set via configure() method call, return the log4js object',
(t) => {
const log4js = sandbox
.require('../../lib/log4js', dependencies)
.configure(fakeFS.config);
t.type(
log4js,
'object',
'Configure method call should return the log4js object!'
);
const log = log4js.getLogger('daemon');
t.type(
log,
'object',
'log4js object, returned by configure(...) method should be able to create log object.'
);
t.type(log.info, 'function');
t.end();
}
);
batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const realFS = require('fs');
const modulePath = 'some/path/to/mylog4js.json';
const pathsChecked = [];
let fakeFS = {};
let dependencies;
let fileRead;
test('log4js configure', (batch) => {
batch.beforeEach((done) => {
fileRead = 0;
fakeFS = {
realpath: realFS.realpath, // fs-extra looks for this
ReadStream: realFS.ReadStream, // need to define these, because graceful-fs uses them
WriteStream: realFS.WriteStream,
read: realFS.read,
closeSync: () => {},
config: {
appenders: {
console: {
type: 'console',
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: {
appenders: ['console'],
level: 'INFO',
},
},
},
readdirSync: (dir) => require('fs').readdirSync(dir),
readFileSync: (file, encoding) => {
fileRead += 1;
batch.type(file, 'string');
batch.equal(file, modulePath);
batch.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: (path) => {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: new Date() };
}
throw new Error('no such file');
},
};
dependencies = {
requires: {
fs: fakeFS,
},
};
if (typeof done === 'function') {
done();
}
});
batch.test(
'when configuration file loaded via LOG4JS_CONFIG env variable',
(t) => {
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
const log4js = sandbox.require('../../lib/log4js', dependencies);
log4js.getLogger('test-logger');
t.equal(fileRead, 1, 'should load the specified local config file');
delete process.env.LOG4JS_CONFIG;
t.end();
}
);
batch.test(
'when configuration is set via configure() method call, return the log4js object',
(t) => {
const log4js = sandbox
.require('../../lib/log4js', dependencies)
.configure(fakeFS.config);
t.type(
log4js,
'object',
'Configure method call should return the log4js object!'
);
const log = log4js.getLogger('daemon');
t.type(
log,
'object',
'log4js object, returned by configure(...) method should be able to create log object.'
);
t.type(log.info, 'function');
t.end();
}
);
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/migration-guide.md | # Migrating from log4js versions older than 2.x
## Configuration
If you try to use your v1 configuration with v2 code, you'll most likely get an error that says something like 'must have property "appenders" of type object'. The format of the configuration object has changed (see the [api](api.md) docs for details). The main changes are a need for you to name your appenders, and you also have to define the default category. For example, if your v1 config looked like this:
```javascript
{
appenders: [
{ type: "console" },
{
type: "dateFile",
filename: "logs/task",
pattern: "-dd.log",
alwaysIncludePattern: true,
category: "task",
},
];
}
```
Then your v2 config should be something like this:
```javascript
{
appenders: {
out: { type: 'console' },
task: {
type: 'dateFile',
filename: 'logs/task',
pattern: '-dd.log',
alwaysIncludePattern: true
}
},
categories: {
default: { appenders: [ 'out' ], level: 'info' },
task: { appenders: [ 'task' ], level: 'info' }
}
}}
```
The functions to define the configuration programmatically have been removed (`addAppender`, `loadAppender`, etc). All configuration should now be done through the single `configure` function, passing in a filename or object.
## Console replacement
V1 used to allow you to replace the node.js console functions with versions that would log to a log4js appender. This used to cause some weird errors, so I decided it was better to remove it from the log4js core functionality. If you still want to do this, you can replicate the behaviour with code similar to this:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console'); // any category will work
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## Config Reloading
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## Appenders
If you have written your own custom appenders, they will not work without modification in v2. See the guide to [writing appenders](writing-appenders.md) for details on how appenders work in 2.x. Note that if you want to write your appender to work with both 1.x and 2.x, then you can tell what version you're running in by examining the number of arguments passed to the `configure` function of your appender: 2 arguments means v1, 4 arguments means v2.
All the core appenders have been upgraded to work with v2, except for the clustered appender which has been removed. The core log4js code handles cluster mode transparently.
The `logFaces` appender was split into two versions to make testing easier and the code simpler; one has HTTP support, the other UDP.
## Exit listeners
Some appenders used to define their own `exit` listeners, and it was never clear whose responsibility it was to clean up resources. Now log4js does not define any `exit` listeners. Instead your application should register an `exit` listener, and call `log4js.shutdown` to be sure that all log messages get written before your application terminates.
## New Features
- MDC contexts - you can now add key-value pairs to a logger (for grouping all log messages from a particular user, for example). Support for these values exists in the [pattern layout](layouts.md), the logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender, and the [multi-file appender](multiFile.md).
- Automatic cluster support - log4js now handles clusters transparently
- Custom levels - you can define your own log levels in the configuration object, including the colours
- Improved performance - several changes have been made to improve performance, especially for the file appenders.
| # Migrating from log4js versions older than 2.x
## Configuration
If you try to use your v1 configuration with v2 code, you'll most likely get an error that says something like 'must have property "appenders" of type object'. The format of the configuration object has changed (see the [api](api.md) docs for details). The main changes are a need for you to name your appenders, and you also have to define the default category. For example, if your v1 config looked like this:
```javascript
{
appenders: [
{ type: "console" },
{
type: "dateFile",
filename: "logs/task",
pattern: "-dd.log",
alwaysIncludePattern: true,
category: "task",
},
];
}
```
Then your v2 config should be something like this:
```javascript
{
appenders: {
out: { type: 'console' },
task: {
type: 'dateFile',
filename: 'logs/task',
pattern: '-dd.log',
alwaysIncludePattern: true
}
},
categories: {
default: { appenders: [ 'out' ], level: 'info' },
task: { appenders: [ 'task' ], level: 'info' }
}
}}
```
The functions to define the configuration programmatically have been removed (`addAppender`, `loadAppender`, etc). All configuration should now be done through the single `configure` function, passing in a filename or object.
## Console replacement
V1 used to allow you to replace the node.js console functions with versions that would log to a log4js appender. This used to cause some weird errors, so I decided it was better to remove it from the log4js core functionality. If you still want to do this, you can replicate the behaviour with code similar to this:
```javascript
log4js.configure(...); // set up your categories and appenders
const logger = log4js.getLogger('console'); // any category will work
console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
```
## Config Reloading
Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
## Appenders
If you have written your own custom appenders, they will not work without modification in v2. See the guide to [writing appenders](writing-appenders.md) for details on how appenders work in 2.x. Note that if you want to write your appender to work with both 1.x and 2.x, then you can tell what version you're running in by examining the number of arguments passed to the `configure` function of your appender: 2 arguments means v1, 4 arguments means v2.
All the core appenders have been upgraded to work with v2, except for the clustered appender which has been removed. The core log4js code handles cluster mode transparently.
The `logFaces` appender was split into two versions to make testing easier and the code simpler; one has HTTP support, the other UDP.
## Exit listeners
Some appenders used to define their own `exit` listeners, and it was never clear whose responsibility it was to clean up resources. Now log4js does not define any `exit` listeners. Instead your application should register an `exit` listener, and call `log4js.shutdown` to be sure that all log messages get written before your application terminates.
## New Features
- MDC contexts - you can now add key-value pairs to a logger (for grouping all log messages from a particular user, for example). Support for these values exists in the [pattern layout](layouts.md), the logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender, and the [multi-file appender](multiFile.md).
- Automatic cluster support - log4js now handles clusters transparently
- Custom levels - you can define your own log levels in the configuration object, including the colours
- Improved performance - several changes have been made to improve performance, especially for the file appenders.
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./.git/hooks/update.sample | #!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --bool hooks.allowunannotated)
allowdeletebranch=$(git config --bool hooks.allowdeletebranch)
denycreatebranch=$(git config --bool hooks.denycreatebranch)
allowdeletetag=$(git config --bool hooks.allowdeletetag)
allowmodifytag=$(git config --bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero="0000000000000000000000000000000000000000"
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0
| #!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --bool hooks.allowunannotated)
allowdeletebranch=$(git config --bool hooks.allowdeletebranch)
denycreatebranch=$(git config --bool hooks.denycreatebranch)
allowdeletetag=$(git config --bool hooks.allowdeletetag)
allowmodifytag=$(git config --bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero="0000000000000000000000000000000000000000"
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/logLevelFilter.md | # Log Level Filter
The log level filter allows you to restrict the log events that an appender will record based on the level of those events. This is useful when you want most logs to go to a file, but errors to be sent as emails, for example. The filter works by wrapping around another appender and controlling which events get sent to it.
## Configuration
- `type` - `logLevelFilter`
- `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter
- `level` - `string` - the minimum level of event to allow through the filter
- `maxLevel` - `string` (optional, defaults to `FATAL`) - the maximum level of event to allow through the filter
If an event's level is greater than or equal to `level` and less than or equal to `maxLevel` then it will be sent to the appender.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
emergencies: { type: "file", filename: "panic-now.log" },
"just-errors": {
type: "logLevelFilter",
appender: "emergencies",
level: "error",
},
},
categories: {
default: { appenders: ["just-errors", "everything"], level: "debug" },
},
});
```
Log events of `debug`, `info`, `error`, and `fatal` will go to `all-the-logs.log`. Events of `error` and `fatal` will also go to `panic-now.log`.
| # Log Level Filter
The log level filter allows you to restrict the log events that an appender will record based on the level of those events. This is useful when you want most logs to go to a file, but errors to be sent as emails, for example. The filter works by wrapping around another appender and controlling which events get sent to it.
## Configuration
- `type` - `logLevelFilter`
- `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter
- `level` - `string` - the minimum level of event to allow through the filter
- `maxLevel` - `string` (optional, defaults to `FATAL`) - the maximum level of event to allow through the filter
If an event's level is greater than or equal to `level` and less than or equal to `maxLevel` then it will be sent to the appender.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
emergencies: { type: "file", filename: "panic-now.log" },
"just-errors": {
type: "logLevelFilter",
appender: "emergencies",
level: "error",
},
},
categories: {
default: { appenders: ["just-errors", "everything"], level: "debug" },
},
});
```
Log events of `debug`, `info`, `error`, and `fatal` will go to `all-the-logs.log`. Events of `error` and `fatal` will also go to `panic-now.log`.
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./test/tap/levels-before-configure-test.js | const { test } = require('tap');
test('Accessing things setup in configure before configure is called', (batch) => {
batch.test('should work', (t) => {
const log4js = require('../../lib/log4js');
t.ok(log4js.levels);
t.ok(log4js.connectLogger);
t.end();
});
batch.end();
});
| const { test } = require('tap');
test('Accessing things setup in configure before configure is called', (batch) => {
batch.test('should work', (t) => {
const log4js = require('../../lib/log4js');
t.ok(log4js.levels);
t.ok(log4js.connectLogger);
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./.husky/commit_msg | #!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx --no -- commitlint --edit $1
| #!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx --no -- commitlint --edit $1
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./test/tap/logging-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function (cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function () {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function (evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function (cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function () {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function (evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./test/tap/connect-logger-test.js | /* eslint max-classes-per-file: ["error", 2] */
const { test } = require('tap');
const EE = require('events').EventEmitter;
const levels = require('../../lib/levels');
class MockLogger {
constructor() {
this.level = levels.TRACE;
this.messages = [];
this.log = function (level, message) {
this.messages.push({ level, message });
};
this.isLevelEnabled = function (level) {
return level.isGreaterThanOrEqualTo(this.level);
};
}
}
function MockRequest(remoteAddr, method, originalUrl, headers, url, custom) {
this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl;
this.url = url;
this.method = method;
this.httpVersionMajor = '5';
this.httpVersionMinor = '0';
this.headers = headers || {};
if (custom) {
for (const key of Object.keys(custom)) {
this[key] = custom[key];
}
}
const self = this;
Object.keys(this.headers).forEach((key) => {
self.headers[key.toLowerCase()] = self.headers[key];
});
}
class MockResponse extends EE {
constructor() {
super();
this.cachedHeaders = {};
}
end() {
this.emit('finish');
}
setHeader(key, value) {
this.cachedHeaders[key.toLowerCase()] = value;
}
getHeader(key) {
return this.cachedHeaders[key.toLowerCase()];
}
writeHead(code /* , headers */) {
this.statusCode = code;
}
}
function request(
cl,
method,
originalUrl,
code,
reqHeaders,
resHeaders,
next,
url,
custom = undefined
) {
const req = new MockRequest(
'my.remote.addr',
method,
originalUrl,
reqHeaders,
url,
custom
);
const res = new MockResponse();
if (next) {
next = next.bind(null, req, res, () => {});
} else {
next = () => {};
}
cl(req, res, next);
res.writeHead(code, resHeaders);
res.end('chunk', 'encoding');
}
test('log4js connect logger', (batch) => {
const clm = require('../../lib/connect-logger');
batch.test('getConnectLoggerModule', (t) => {
t.type(clm, 'function', 'should return a connect logger factory');
t.test(
'should take a log4js logger and return a "connect logger"',
(assert) => {
const ml = new MockLogger();
const cl = clm(ml);
assert.type(cl, 'function');
assert.end();
}
);
t.test('log events', (assert) => {
const ml = new MockLogger();
const cl = clm(ml);
request(cl, 'GET', 'http://url', 200);
const { messages } = ml;
assert.type(messages, 'Array');
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.match(messages[0].message, 'GET');
assert.match(messages[0].message, 'http://url');
assert.match(messages[0].message, 'my.remote.addr');
assert.match(messages[0].message, '200');
assert.end();
});
t.test('log events with level below logging level', (assert) => {
const ml = new MockLogger();
ml.level = levels.FATAL;
const cl = clm(ml);
request(cl, 'GET', 'http://url', 200);
assert.type(ml.messages, 'Array');
assert.equal(ml.messages.length, 0);
assert.end();
});
t.test('log events with non-default level and custom format', (assert) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
request(cl, 'GET', 'http://url', 200);
const { messages } = ml;
assert.type(messages, Array);
assert.equal(messages.length, 1);
assert.ok(levels.WARN.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url');
assert.end();
});
t.test('adding multiple loggers should only log once', (assert) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
const nextLogger = clm(ml, { level: levels.INFO, format: ':method' });
request(cl, 'GET', 'http://url', 200, null, null, nextLogger);
const { messages } = ml;
assert.type(messages, Array);
assert.equal(messages.length, 1);
assert.ok(levels.WARN.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url');
assert.end();
});
t.end();
});
batch.test('logger with options as string', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':method :url');
request(cl, 'POST', 'http://meh', 200);
const { messages } = ml;
t.equal(messages[0].message, 'POST http://meh');
t.end();
});
batch.test('auto log levels', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: 'auto', format: ':method :url' });
request(cl, 'GET', 'http://meh', 200);
request(cl, 'GET', 'http://meh', 201);
request(cl, 'GET', 'http://meh', 302);
request(cl, 'GET', 'http://meh', 404);
request(cl, 'GET', 'http://meh', 500);
const { messages } = ml;
t.test('should use INFO for 2xx', (assert) => {
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.ok(levels.INFO.isEqualTo(messages[1].level));
assert.end();
});
t.test('should use WARN for 3xx', (assert) => {
assert.ok(levels.WARN.isEqualTo(messages[2].level));
assert.end();
});
t.test('should use ERROR for 4xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
assert.end();
});
t.test('should use ERROR for 5xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
assert.end();
});
t.end();
});
batch.test('logger with status code rules applied', (t) => {
const ml = new MockLogger();
ml.level = levels.DEBUG;
const clr = [
{ codes: [201, 304], level: levels.DEBUG.toString() },
{ from: 200, to: 299, level: levels.DEBUG.toString() },
{ from: 300, to: 399, level: levels.INFO.toString() },
];
const cl = clm(ml, {
level: 'auto',
format: ':method :url',
statusRules: clr,
});
request(cl, 'GET', 'http://meh', 200);
request(cl, 'GET', 'http://meh', 201);
request(cl, 'GET', 'http://meh', 302);
request(cl, 'GET', 'http://meh', 304);
request(cl, 'GET', 'http://meh', 404);
request(cl, 'GET', 'http://meh', 500);
const { messages } = ml;
t.test('should use DEBUG for 2xx', (assert) => {
assert.ok(levels.DEBUG.isEqualTo(messages[0].level));
assert.ok(levels.DEBUG.isEqualTo(messages[1].level));
assert.end();
});
t.test('should use WARN for 3xx, DEBUG for 304', (assert) => {
assert.ok(levels.INFO.isEqualTo(messages[2].level));
assert.ok(levels.DEBUG.isEqualTo(messages[3].level));
assert.end();
});
t.test('should use ERROR for 4xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
assert.end();
});
t.test('should use ERROR for 5xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[5].level));
assert.end();
});
t.end();
});
batch.test('format using a function', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, () => 'I was called');
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages[0].message, 'I was called');
t.end();
});
batch.test('format using a function that also uses tokens', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(
ml,
(req, res, tokenReplacer) => `${req.method} ${tokenReplacer(':status')}`
);
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages[0].message, 'GET 200');
t.end();
});
batch.test(
'format using a function, but do not log anything if the function returns nothing',
(t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, () => null);
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages.length, 0);
t.end();
}
);
batch.test('format that includes request headers', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':req[Content-Type]');
request(cl, 'GET', 'http://blah', 200, {
'Content-Type': 'application/json',
});
t.equal(ml.messages[0].message, 'application/json');
t.end();
});
batch.test('format that includes response headers', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':res[Content-Type]');
request(cl, 'GET', 'http://blah', 200, null, {
'Content-Type': 'application/cheese',
});
t.equal(ml.messages[0].message, 'application/cheese');
t.end();
});
batch.test('url token should check originalUrl and url', (t) => {
const ml = new MockLogger();
const cl = clm(ml, ':url');
request(cl, 'GET', null, 200, null, null, null, 'http://cheese');
t.equal(ml.messages[0].message, 'http://cheese');
t.end();
});
batch.test('log events with custom token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :custom_string',
tokens: [
{
token: ':custom_string',
replacement: 'fooBAR',
},
],
});
request(cl, 'GET', 'http://url', 200);
t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url fooBAR');
t.end();
});
batch.test('log events with custom override token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :date',
tokens: [
{
token: ':date',
replacement: '20150310',
},
],
});
request(cl, 'GET', 'http://url', 200);
t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url 20150310');
t.end();
});
batch.test('log events with custom format', (t) => {
const ml = new MockLogger();
const body = { say: 'hi!' };
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: (req, res, format) =>
format(`:method :url ${JSON.stringify(req.body)}`),
});
request(
cl,
'POST',
'http://url',
200,
{ 'Content-Type': 'application/json' },
null,
null,
null,
{ body }
);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, `POST http://url ${JSON.stringify(body)}`);
t.end();
});
batch.test(
'handle weird old node versions where socket contains socket',
(t) => {
const ml = new MockLogger();
const cl = clm(ml, ':remote-addr');
const req = new MockRequest(null, 'GET', 'http://blah');
req.socket = { socket: { remoteAddress: 'this is weird' } };
const res = new MockResponse();
cl(req, res, () => {});
res.writeHead(200, {});
res.end('chunk', 'encoding');
t.equal(ml.messages[0].message, 'this is weird');
t.end();
}
);
batch.test(
'handles as soon as any of the events end/finish/error/close triggers (only once)',
(t) => {
const ml = new MockLogger();
const cl = clm(ml, ':remote-addr');
const req = new MockRequest(null, 'GET', 'http://blah');
req.socket = { socket: { remoteAddress: 'this is weird' } };
const res = new MockResponse();
cl(req, res, () => {});
res.writeHead(200, {});
t.equal(ml.messages.length, 0);
res.emit('end');
res.emit('finish');
res.emit('error');
res.emit('close');
t.equal(ml.messages.length, 1);
t.equal(ml.messages[0].message, 'this is weird');
t.end();
}
);
batch.end();
});
| /* eslint max-classes-per-file: ["error", 2] */
const { test } = require('tap');
const EE = require('events').EventEmitter;
const levels = require('../../lib/levels');
class MockLogger {
constructor() {
this.level = levels.TRACE;
this.messages = [];
this.log = function (level, message) {
this.messages.push({ level, message });
};
this.isLevelEnabled = function (level) {
return level.isGreaterThanOrEqualTo(this.level);
};
}
}
function MockRequest(remoteAddr, method, originalUrl, headers, url, custom) {
this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl;
this.url = url;
this.method = method;
this.httpVersionMajor = '5';
this.httpVersionMinor = '0';
this.headers = headers || {};
if (custom) {
for (const key of Object.keys(custom)) {
this[key] = custom[key];
}
}
const self = this;
Object.keys(this.headers).forEach((key) => {
self.headers[key.toLowerCase()] = self.headers[key];
});
}
class MockResponse extends EE {
constructor() {
super();
this.cachedHeaders = {};
}
end() {
this.emit('finish');
}
setHeader(key, value) {
this.cachedHeaders[key.toLowerCase()] = value;
}
getHeader(key) {
return this.cachedHeaders[key.toLowerCase()];
}
writeHead(code /* , headers */) {
this.statusCode = code;
}
}
function request(
cl,
method,
originalUrl,
code,
reqHeaders,
resHeaders,
next,
url,
custom = undefined
) {
const req = new MockRequest(
'my.remote.addr',
method,
originalUrl,
reqHeaders,
url,
custom
);
const res = new MockResponse();
if (next) {
next = next.bind(null, req, res, () => {});
} else {
next = () => {};
}
cl(req, res, next);
res.writeHead(code, resHeaders);
res.end('chunk', 'encoding');
}
test('log4js connect logger', (batch) => {
const clm = require('../../lib/connect-logger');
batch.test('getConnectLoggerModule', (t) => {
t.type(clm, 'function', 'should return a connect logger factory');
t.test(
'should take a log4js logger and return a "connect logger"',
(assert) => {
const ml = new MockLogger();
const cl = clm(ml);
assert.type(cl, 'function');
assert.end();
}
);
t.test('log events', (assert) => {
const ml = new MockLogger();
const cl = clm(ml);
request(cl, 'GET', 'http://url', 200);
const { messages } = ml;
assert.type(messages, 'Array');
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.match(messages[0].message, 'GET');
assert.match(messages[0].message, 'http://url');
assert.match(messages[0].message, 'my.remote.addr');
assert.match(messages[0].message, '200');
assert.end();
});
t.test('log events with level below logging level', (assert) => {
const ml = new MockLogger();
ml.level = levels.FATAL;
const cl = clm(ml);
request(cl, 'GET', 'http://url', 200);
assert.type(ml.messages, 'Array');
assert.equal(ml.messages.length, 0);
assert.end();
});
t.test('log events with non-default level and custom format', (assert) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
request(cl, 'GET', 'http://url', 200);
const { messages } = ml;
assert.type(messages, Array);
assert.equal(messages.length, 1);
assert.ok(levels.WARN.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url');
assert.end();
});
t.test('adding multiple loggers should only log once', (assert) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
const nextLogger = clm(ml, { level: levels.INFO, format: ':method' });
request(cl, 'GET', 'http://url', 200, null, null, nextLogger);
const { messages } = ml;
assert.type(messages, Array);
assert.equal(messages.length, 1);
assert.ok(levels.WARN.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url');
assert.end();
});
t.end();
});
batch.test('logger with options as string', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':method :url');
request(cl, 'POST', 'http://meh', 200);
const { messages } = ml;
t.equal(messages[0].message, 'POST http://meh');
t.end();
});
batch.test('auto log levels', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, { level: 'auto', format: ':method :url' });
request(cl, 'GET', 'http://meh', 200);
request(cl, 'GET', 'http://meh', 201);
request(cl, 'GET', 'http://meh', 302);
request(cl, 'GET', 'http://meh', 404);
request(cl, 'GET', 'http://meh', 500);
const { messages } = ml;
t.test('should use INFO for 2xx', (assert) => {
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.ok(levels.INFO.isEqualTo(messages[1].level));
assert.end();
});
t.test('should use WARN for 3xx', (assert) => {
assert.ok(levels.WARN.isEqualTo(messages[2].level));
assert.end();
});
t.test('should use ERROR for 4xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
assert.end();
});
t.test('should use ERROR for 5xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
assert.end();
});
t.end();
});
batch.test('logger with status code rules applied', (t) => {
const ml = new MockLogger();
ml.level = levels.DEBUG;
const clr = [
{ codes: [201, 304], level: levels.DEBUG.toString() },
{ from: 200, to: 299, level: levels.DEBUG.toString() },
{ from: 300, to: 399, level: levels.INFO.toString() },
];
const cl = clm(ml, {
level: 'auto',
format: ':method :url',
statusRules: clr,
});
request(cl, 'GET', 'http://meh', 200);
request(cl, 'GET', 'http://meh', 201);
request(cl, 'GET', 'http://meh', 302);
request(cl, 'GET', 'http://meh', 304);
request(cl, 'GET', 'http://meh', 404);
request(cl, 'GET', 'http://meh', 500);
const { messages } = ml;
t.test('should use DEBUG for 2xx', (assert) => {
assert.ok(levels.DEBUG.isEqualTo(messages[0].level));
assert.ok(levels.DEBUG.isEqualTo(messages[1].level));
assert.end();
});
t.test('should use WARN for 3xx, DEBUG for 304', (assert) => {
assert.ok(levels.INFO.isEqualTo(messages[2].level));
assert.ok(levels.DEBUG.isEqualTo(messages[3].level));
assert.end();
});
t.test('should use ERROR for 4xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
assert.end();
});
t.test('should use ERROR for 5xx', (assert) => {
assert.ok(levels.ERROR.isEqualTo(messages[5].level));
assert.end();
});
t.end();
});
batch.test('format using a function', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, () => 'I was called');
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages[0].message, 'I was called');
t.end();
});
batch.test('format using a function that also uses tokens', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(
ml,
(req, res, tokenReplacer) => `${req.method} ${tokenReplacer(':status')}`
);
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages[0].message, 'GET 200');
t.end();
});
batch.test(
'format using a function, but do not log anything if the function returns nothing',
(t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, () => null);
request(cl, 'GET', 'http://blah', 200);
t.equal(ml.messages.length, 0);
t.end();
}
);
batch.test('format that includes request headers', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':req[Content-Type]');
request(cl, 'GET', 'http://blah', 200, {
'Content-Type': 'application/json',
});
t.equal(ml.messages[0].message, 'application/json');
t.end();
});
batch.test('format that includes response headers', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, ':res[Content-Type]');
request(cl, 'GET', 'http://blah', 200, null, {
'Content-Type': 'application/cheese',
});
t.equal(ml.messages[0].message, 'application/cheese');
t.end();
});
batch.test('url token should check originalUrl and url', (t) => {
const ml = new MockLogger();
const cl = clm(ml, ':url');
request(cl, 'GET', null, 200, null, null, null, 'http://cheese');
t.equal(ml.messages[0].message, 'http://cheese');
t.end();
});
batch.test('log events with custom token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :custom_string',
tokens: [
{
token: ':custom_string',
replacement: 'fooBAR',
},
],
});
request(cl, 'GET', 'http://url', 200);
t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url fooBAR');
t.end();
});
batch.test('log events with custom override token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :date',
tokens: [
{
token: ':date',
replacement: '20150310',
},
],
});
request(cl, 'GET', 'http://url', 200);
t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url 20150310');
t.end();
});
batch.test('log events with custom format', (t) => {
const ml = new MockLogger();
const body = { say: 'hi!' };
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: (req, res, format) =>
format(`:method :url ${JSON.stringify(req.body)}`),
});
request(
cl,
'POST',
'http://url',
200,
{ 'Content-Type': 'application/json' },
null,
null,
null,
{ body }
);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, `POST http://url ${JSON.stringify(body)}`);
t.end();
});
batch.test(
'handle weird old node versions where socket contains socket',
(t) => {
const ml = new MockLogger();
const cl = clm(ml, ':remote-addr');
const req = new MockRequest(null, 'GET', 'http://blah');
req.socket = { socket: { remoteAddress: 'this is weird' } };
const res = new MockResponse();
cl(req, res, () => {});
res.writeHead(200, {});
res.end('chunk', 'encoding');
t.equal(ml.messages[0].message, 'this is weird');
t.end();
}
);
batch.test(
'handles as soon as any of the events end/finish/error/close triggers (only once)',
(t) => {
const ml = new MockLogger();
const cl = clm(ml, ':remote-addr');
const req = new MockRequest(null, 'GET', 'http://blah');
req.socket = { socket: { remoteAddress: 'this is weird' } };
const res = new MockResponse();
cl(req, res, () => {});
res.writeHead(200, {});
t.equal(ml.messages.length, 0);
res.emit('end');
res.emit('finish');
res.emit('error');
res.emit('close');
t.equal(ml.messages.length, 1);
t.equal(ml.messages[0].message, 'this is weird');
t.end();
}
);
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./examples/hipchat-appender.js | /**
* !!! The hipchat-appender requires `hipchat-notifier` from npm, e.g.
* - list as a dependency in your application's package.json ||
* - npm install hipchat-notifier
*/
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
hipchat: {
type: 'hipchat',
hipchat_token:
process.env.HIPCHAT_TOKEN ||
'< User token with Notification Privileges >',
hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >',
},
},
categories: {
default: { appenders: ['hipchat'], level: 'trace' },
},
});
const logger = log4js.getLogger('hipchat');
logger.warn('Test Warn message');
logger.info('Test Info message');
logger.debug('Test Debug Message');
logger.trace('Test Trace Message');
logger.fatal('Test Fatal Message');
logger.error('Test Error Message');
// alternative configuration demonstrating callback + custom layout
// /////////////////////////////////////////////////////////////////
// use a custom layout function (in this case, the provided basicLayout)
// format: [TIMESTAMP][LEVEL][category] - [message]
log4js.configure({
appenders: {
hipchat: {
type: 'hipchat',
hipchat_token:
process.env.HIPCHAT_TOKEN ||
'< User token with Notification Privileges >',
hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >',
hipchat_from: 'Mr. Semantics',
hipchat_notify: false,
hipchat_response_callback: function (err, response, body) {
if (err || response.statusCode > 300) {
throw new Error('hipchat-notifier failed');
}
console.log('mr semantics callback success');
},
layout: { type: 'basic' },
},
},
categories: { default: { appenders: ['hipchat'], level: 'trace' } },
});
logger.info('Test customLayout from Mr. Semantics');
| /**
* !!! The hipchat-appender requires `hipchat-notifier` from npm, e.g.
* - list as a dependency in your application's package.json ||
* - npm install hipchat-notifier
*/
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
hipchat: {
type: 'hipchat',
hipchat_token:
process.env.HIPCHAT_TOKEN ||
'< User token with Notification Privileges >',
hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >',
},
},
categories: {
default: { appenders: ['hipchat'], level: 'trace' },
},
});
const logger = log4js.getLogger('hipchat');
logger.warn('Test Warn message');
logger.info('Test Info message');
logger.debug('Test Debug Message');
logger.trace('Test Trace Message');
logger.fatal('Test Fatal Message');
logger.error('Test Error Message');
// alternative configuration demonstrating callback + custom layout
// /////////////////////////////////////////////////////////////////
// use a custom layout function (in this case, the provided basicLayout)
// format: [TIMESTAMP][LEVEL][category] - [message]
log4js.configure({
appenders: {
hipchat: {
type: 'hipchat',
hipchat_token:
process.env.HIPCHAT_TOKEN ||
'< User token with Notification Privileges >',
hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >',
hipchat_from: 'Mr. Semantics',
hipchat_notify: false,
hipchat_response_callback: function (err, response, body) {
if (err || response.statusCode > 300) {
throw new Error('hipchat-notifier failed');
}
console.log('mr semantics callback success');
},
layout: { type: 'basic' },
},
},
categories: { default: { appenders: ['hipchat'], level: 'trace' } },
});
logger.info('Test customLayout from Mr. Semantics');
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./.git/refs/heads/master | bd457888eb91b9e932fe8f66d720cf2d9d6442f4
| bd457888eb91b9e932fe8f66d720cf2d9d6442f4
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./examples/example.js | 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/connect-logger.md | # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function (req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function (req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/multiFile.md | # MultiFile Appender
The multiFile appender can be used to dynamically write logs to multiple files, based on a property of the logging event. Use this as a way to write separate log files for each category when the number of categories is unknown, for instance. It creates [file](file.md) appenders under the hood, so all the options that apply to that appender (apart from filename) can be used with this one, allowing the log files to be rotated and capped at a certain size.
## Configuration
- `type` - `"multiFile"`
- `base` - `string` - the base part of the generated log filename
- `property` - `string` - the value to use to split files (see below).
- `extension` - `string` - the suffix for the generated log filename.
- `timeout` - `integer` - optional activity timeout in ms after which the file will be closed.
All other properties will be passed to the created [file](file.md) appenders. For the property value, `categoryName` is probably the most useful - although you could use `pid` or `level`. If the property is not found then the appender will look for the value in the context map. If that fails, then the logger will not output the logging event, without an error. This is to allow for dynamic properties which may not exist for all log messages.
## Example (split on category)
```javascript
log4js.configure({
appenders: {
multi: {
type: "multiFile",
base: "logs/",
property: "categoryName",
extension: ".log",
},
},
categories: {
default: { appenders: ["multi"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in logs/default.log");
const otherLogger = log4js.getLogger("cheese");
otherLogger.info("Cheese is cheddar - this will be logged in logs/cheese.log");
```
This example will result in two log files (`logs/default.log` and `logs/cheese.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "multiFile",
base: "logs/",
property: "userID",
extension: ".log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const userLogger = log4js.getLogger("user");
userLogger.addContext("userID", user.getID());
userLogger.info("this user just logged in");
```
This will result in one log file (`logs/u12345.log`), capped at 10Mb in size, with three backups kept when rolling the file. If more users were logged, each user would get their own files, and their own backups.
| # MultiFile Appender
The multiFile appender can be used to dynamically write logs to multiple files, based on a property of the logging event. Use this as a way to write separate log files for each category when the number of categories is unknown, for instance. It creates [file](file.md) appenders under the hood, so all the options that apply to that appender (apart from filename) can be used with this one, allowing the log files to be rotated and capped at a certain size.
## Configuration
- `type` - `"multiFile"`
- `base` - `string` - the base part of the generated log filename
- `property` - `string` - the value to use to split files (see below).
- `extension` - `string` - the suffix for the generated log filename.
- `timeout` - `integer` - optional activity timeout in ms after which the file will be closed.
All other properties will be passed to the created [file](file.md) appenders. For the property value, `categoryName` is probably the most useful - although you could use `pid` or `level`. If the property is not found then the appender will look for the value in the context map. If that fails, then the logger will not output the logging event, without an error. This is to allow for dynamic properties which may not exist for all log messages.
## Example (split on category)
```javascript
log4js.configure({
appenders: {
multi: {
type: "multiFile",
base: "logs/",
property: "categoryName",
extension: ".log",
},
},
categories: {
default: { appenders: ["multi"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in logs/default.log");
const otherLogger = log4js.getLogger("cheese");
otherLogger.info("Cheese is cheddar - this will be logged in logs/cheese.log");
```
This example will result in two log files (`logs/default.log` and `logs/cheese.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "multiFile",
base: "logs/",
property: "userID",
extension: ".log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const userLogger = log4js.getLogger("user");
userLogger.addContext("userID", user.getID());
userLogger.info("this user just logged in");
```
This will result in one log file (`logs/u12345.log`), capped at 10Mb in size, with three backups kept when rolling the file. If more users were logged, each user would get their own files, and their own backups.
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./LICENSE | Copyright 2015 Gareth Jones (with contributions from many other people)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| Copyright 2015 Gareth Jones (with contributions from many other people)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./test/tap/consoleAppender-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const consoleAppender = require('../../lib/appenders/console');
test('log4js console appender', (batch) => {
batch.test('should export a configure function', (t) => {
t.type(consoleAppender.configure, 'function');
t.end();
});
batch.test('should use default layout if none specified', (t) => {
const messages = [];
const fakeConsole = {
log(msg) {
messages.push(msg);
},
};
const log4js = sandbox.require('../../lib/log4js', {
globals: {
console: fakeConsole,
},
});
log4js.configure({
appenders: { console: { type: 'console' } },
categories: { default: { appenders: ['console'], level: 'DEBUG' } },
});
log4js.getLogger().info('blah');
t.match(messages[0], /.*default.*blah/);
t.end();
});
batch.test('should output to console', (t) => {
const messages = [];
const fakeConsole = {
log(msg) {
messages.push(msg);
},
};
const log4js = sandbox.require('../../lib/log4js', {
globals: {
console: fakeConsole,
},
});
log4js.configure({
appenders: {
console: { type: 'console', layout: { type: 'messagePassThrough' } },
},
categories: { default: { appenders: ['console'], level: 'DEBUG' } },
});
log4js.getLogger().info('blah');
t.equal(messages[0], 'blah');
t.end();
});
batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const consoleAppender = require('../../lib/appenders/console');
test('log4js console appender', (batch) => {
batch.test('should export a configure function', (t) => {
t.type(consoleAppender.configure, 'function');
t.end();
});
batch.test('should use default layout if none specified', (t) => {
const messages = [];
const fakeConsole = {
log(msg) {
messages.push(msg);
},
};
const log4js = sandbox.require('../../lib/log4js', {
globals: {
console: fakeConsole,
},
});
log4js.configure({
appenders: { console: { type: 'console' } },
categories: { default: { appenders: ['console'], level: 'DEBUG' } },
});
log4js.getLogger().info('blah');
t.match(messages[0], /.*default.*blah/);
t.end();
});
batch.test('should output to console', (t) => {
const messages = [];
const fakeConsole = {
log(msg) {
messages.push(msg);
},
};
const log4js = sandbox.require('../../lib/log4js', {
globals: {
console: fakeConsole,
},
});
log4js.configure({
appenders: {
console: { type: 'console', layout: { type: 'messagePassThrough' } },
},
categories: { default: { appenders: ['console'], level: 'DEBUG' } },
});
log4js.getLogger().info('blah');
t.equal(messages[0], 'blah');
t.end();
});
batch.end();
});
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/contrib-guidelines.md | # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/webpack.md | # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| # Working with webpack
Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary.
```
const stdout = require('log4js/lib/appenders/stdout');
import * as Configuration from 'log4js/lib/configuration';
Configuration.prototype.loadAppenderModule = function(type) {
return stdout;
};
```
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./examples/logstashUDP.js | const log4js = require('../lib/log4js');
/*
Sample logstash config:
udp {
codec => json
port => 10001
queue_size => 2
workers => 2
type => myAppType
}
*/
log4js.configure({
appenders: {
console: {
type: 'console',
},
logstash: {
host: '127.0.0.1',
port: 10001,
type: 'logstashUDP',
logType: 'myAppType', // Optional, defaults to 'category'
fields: {
// Optional, will be added to the 'fields' object in logstash
field1: 'value1',
field2: 'value2',
},
layout: {
type: 'pattern',
pattern: '%m',
},
},
},
categories: {
default: { appenders: ['console', 'logstash'], level: 'info' },
},
});
const logger = log4js.getLogger('myLogger');
logger.info('Test log message %s', 'arg1', 'arg2');
| const log4js = require('../lib/log4js');
/*
Sample logstash config:
udp {
codec => json
port => 10001
queue_size => 2
workers => 2
type => myAppType
}
*/
log4js.configure({
appenders: {
console: {
type: 'console',
},
logstash: {
host: '127.0.0.1',
port: 10001,
type: 'logstashUDP',
logType: 'myAppType', // Optional, defaults to 'category'
fields: {
// Optional, will be added to the 'fields' object in logstash
field1: 'value1',
field2: 'value2',
},
layout: {
type: 'pattern',
pattern: '%m',
},
},
},
categories: {
default: { appenders: ['console', 'logstash'], level: 'info' },
},
});
const logger = log4js.getLogger('myLogger');
logger.info('Test log message %s', 'arg1', 'arg2');
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./types/test.ts | import * as log4js from './log4js';
log4js.configure('./filename');
const logger1 = log4js.getLogger();
logger1.level = 'debug';
logger1.debug('Some debug messages');
logger1.fatal({
whatever: 'foo',
});
const logger3 = log4js.getLogger('cheese');
logger3.trace('Entering cheese testing');
logger3.debug('Got cheese.');
logger3.info('Cheese is Gouda.');
logger3.warn('Cheese is quite smelly.');
logger3.error('Cheese is too ripe!');
logger3.fatal('Cheese was breeding ground for listeria.');
log4js.configure({
appenders: { cheese: { type: 'console', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } },
});
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' },
},
categories: {
default: { appenders: ['out'], level: 'info' },
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID',
});
log4js.addLayout(
'json',
(config) =>
function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
}
);
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
},
categories: {
default: { appenders: ['out'], level: 'info' },
},
});
log4js.configure({
appenders: {
file: { type: 'dateFile', filename: 'thing.log', pattern: '.mm' },
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
const logger4 = log4js.getLogger('thing');
logger4.log('logging a thing');
const logger5 = log4js.getLogger('json-test');
logger5.info('this is just a test');
logger5.error('of a custom appender');
logger5.warn('that outputs json');
log4js.shutdown();
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
const logger6 = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger6.trace('Entering cheese testing');
logger6.debug('Got cheese.');
logger6.info('Cheese is Gouda.');
logger6.log('Something funny about cheese.');
logger6.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger6.error('Cheese %s is too ripe!', 'gouda');
logger6.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
import { configure, getLogger } from './log4js';
configure('./filename');
const logger2 = getLogger();
logger2.level = 'debug';
logger2.debug('Some debug messages');
configure({
appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } },
});
log4js.configure('./filename').getLogger();
const logger7 = log4js.getLogger();
logger7.level = 'debug';
logger7.debug('Some debug messages');
const levels: log4js.Levels = log4js.levels;
const level: log4js.Level = levels.getLevel('info');
log4js.connectLogger(logger1, {
format: ':x, :y',
level: 'info',
context: true,
});
log4js.connectLogger(logger2, {
format: (req, _res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
});
//support for passing in an appender module
log4js.configure({
appenders: { thing: { type: { configure: () => () => {} } } },
categories: { default: { appenders: ['thing'], level: 'debug' } },
});
declare module './log4js' {
interface Appenders {
StorageTestAppender: {
type: 'storageTest';
storageMedium: 'dvd' | 'usb' | 'hdd';
};
}
}
log4js.configure({
appenders: { test: { type: 'storageTest', storageMedium: 'dvd' } },
categories: { default: { appenders: ['test'], level: 'debug' } },
});
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: ['rec'], level: 'debug' } },
});
const logger8 = log4js.getLogger();
logger8.level = 'debug';
logger8.debug('This will go to the recording!');
logger8.debug('Another one');
const recording = log4js.recording();
const loggingEvents = recording.playback();
if (loggingEvents.length !== 2) {
throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`);
}
if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
throw new Error(
`Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`
);
}
if (loggingEvents[1].data[0] !== 'Another one') {
throw new Error(
`Expected message 'Another one', got ${loggingEvents[1].data[0]}`
);
}
recording.reset();
const loggingEventsPostReset = recording.playback();
if (loggingEventsPostReset.length !== 0) {
throw new Error(
`Expected 0 recorded events after reset, got ${loggingEventsPostReset.length}`
);
}
| import * as log4js from './log4js';
log4js.configure('./filename');
const logger1 = log4js.getLogger();
logger1.level = 'debug';
logger1.debug('Some debug messages');
logger1.fatal({
whatever: 'foo',
});
const logger3 = log4js.getLogger('cheese');
logger3.trace('Entering cheese testing');
logger3.debug('Got cheese.');
logger3.info('Cheese is Gouda.');
logger3.warn('Cheese is quite smelly.');
logger3.error('Cheese is too ripe!');
logger3.fatal('Cheese was breeding ground for listeria.');
log4js.configure({
appenders: { cheese: { type: 'console', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } },
});
log4js.configure({
appenders: {
out: { type: 'file', filename: 'pm2logs.log' },
},
categories: {
default: { appenders: ['out'], level: 'info' },
},
pm2: true,
pm2InstanceVar: 'INSTANCE_ID',
});
log4js.addLayout(
'json',
(config) =>
function (logEvent) {
return JSON.stringify(logEvent) + config.separator;
}
);
log4js.configure({
appenders: {
out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
},
categories: {
default: { appenders: ['out'], level: 'info' },
},
});
log4js.configure({
appenders: {
file: { type: 'dateFile', filename: 'thing.log', pattern: '.mm' },
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
const logger4 = log4js.getLogger('thing');
logger4.log('logging a thing');
const logger5 = log4js.getLogger('json-test');
logger5.info('this is just a test');
logger5.error('of a custom appender');
logger5.warn('that outputs json');
log4js.shutdown();
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
const logger6 = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger6.trace('Entering cheese testing');
logger6.debug('Got cheese.');
logger6.info('Cheese is Gouda.');
logger6.log('Something funny about cheese.');
logger6.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger6.error('Cheese %s is too ripe!', 'gouda');
logger6.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
import { configure, getLogger } from './log4js';
configure('./filename');
const logger2 = getLogger();
logger2.level = 'debug';
logger2.debug('Some debug messages');
configure({
appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
categories: { default: { appenders: ['cheese'], level: 'error' } },
});
log4js.configure('./filename').getLogger();
const logger7 = log4js.getLogger();
logger7.level = 'debug';
logger7.debug('Some debug messages');
const levels: log4js.Levels = log4js.levels;
const level: log4js.Level = levels.getLevel('info');
log4js.connectLogger(logger1, {
format: ':x, :y',
level: 'info',
context: true,
});
log4js.connectLogger(logger2, {
format: (req, _res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
});
//support for passing in an appender module
log4js.configure({
appenders: { thing: { type: { configure: () => () => {} } } },
categories: { default: { appenders: ['thing'], level: 'debug' } },
});
declare module './log4js' {
interface Appenders {
StorageTestAppender: {
type: 'storageTest';
storageMedium: 'dvd' | 'usb' | 'hdd';
};
}
}
log4js.configure({
appenders: { test: { type: 'storageTest', storageMedium: 'dvd' } },
categories: { default: { appenders: ['test'], level: 'debug' } },
});
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: ['rec'], level: 'debug' } },
});
const logger8 = log4js.getLogger();
logger8.level = 'debug';
logger8.debug('This will go to the recording!');
logger8.debug('Another one');
const recording = log4js.recording();
const loggingEvents = recording.playback();
if (loggingEvents.length !== 2) {
throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`);
}
if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
throw new Error(
`Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`
);
}
if (loggingEvents[1].data[0] !== 'Another one') {
throw new Error(
`Expected message 'Another one', got ${loggingEvents[1].data[0]}`
);
}
recording.reset();
const loggingEventsPostReset = recording.playback();
if (loggingEventsPostReset.length !== 0) {
throw new Error(
`Expected 0 recorded events after reset, got ${loggingEventsPostReset.length}`
);
}
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./docs/Gemfile | source 'https://rubygems.org'
gem 'github-pages', group: :jekyll_plugins
| source 'https://rubygems.org'
gem 'github-pages', group: :jekyll_plugins
| -1 |
log4js-node/log4js-node | 1,332 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined` | Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | lamweili | "2022-10-01T10:52:35Z" | "2022-10-01T10:57:05Z" | 916eef11f1d2aa2f32765f956f1f674745feb8b6 | 570ef530dc02d3e843a5421cb015bb8fadfe0b41 | fix(LoggingEvent): serde for `NaN`, `Infinity`, `-Infinity`, `undefined`. Fixes #1187
Supersedes PR #1188
## Affected Components
Only affects clustering, multiprocessAppender, and tcpAppender.
These three will `serialise()` to `String` to transmit for the receiver to `deserialise()`.
| Code | Object<br>(Input) | Serialised<br>(Transmission) | Deserialised<br>(Output) | Match
|-|-|-|-|-|
`{"a": Number("abc")}` | `{"a": NaN}` | `{"a": "__LOG4JS_NaN__"}` | `{"a": NaN}` | ✔️ |
`{"b": 1/0}` | `{"b": Infinity}` | `{"b": "__LOG4JS_Infinity__"}` | `{"b": Infinity}` | ✔️ |
`{"c": -1/0}` | `{"c": -Infinity}` | `{"c": "__LOG4JS_-Infinity__"}` | `{"c": -Infinity}` | ✔️ |
`[undefined]` | `[undefined]` | `["__LOG4JS_undefined__"]` | `[undefined]` | ✔️ |
Compared to PR #1188, now the output matches exactly the input. | ./.gitattributes | # Automatically normalize line endings for all text-based files
# http://git-scm.com/docs/gitattributes#_end_of_line_conversion
* text=auto
# For the following file types, normalize line endings to LF on
# checkin and prevent conversion to CRLF when they are checked out
# (this is required in order to prevent newline related issues like,
# for example, after the build script is run)
.* text eol=lf
*.css text eol=lf
*.scss text eol=lf
*.html text eol=lf
*.js text eol=lf
*.cjs text eol=lf
*.mjs text eol=lf
*.ts text eol=lf
*.cts text eol=lf
*.mts text eol=lf
*.json text eol=lf
*.md text eol=lf
*.sh text eol=lf
*.txt text eol=lf
*.xml text eol=lf
*.yml text eol=lf
.husky/* text eol=lf
# Exclude the `.htaccess` file from GitHub's language statistics
# https://github.com/github/linguist#using-gitattributes
dist/.htaccess linguist-vendored
| # Automatically normalize line endings for all text-based files
# http://git-scm.com/docs/gitattributes#_end_of_line_conversion
* text=auto
# For the following file types, normalize line endings to LF on
# checkin and prevent conversion to CRLF when they are checked out
# (this is required in order to prevent newline related issues like,
# for example, after the build script is run)
.* text eol=lf
*.css text eol=lf
*.scss text eol=lf
*.html text eol=lf
*.js text eol=lf
*.cjs text eol=lf
*.mjs text eol=lf
*.ts text eol=lf
*.cts text eol=lf
*.mts text eol=lf
*.json text eol=lf
*.md text eol=lf
*.sh text eol=lf
*.txt text eol=lf
*.xml text eol=lf
*.yml text eol=lf
.husky/* text eol=lf
# Exclude the `.htaccess` file from GitHub's language statistics
# https://github.com/github/linguist#using-gitattributes
dist/.htaccess linguist-vendored
| -1 |
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./.github/workflows/codeql-analysis.yml | # For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "master" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master" ]
schedule:
- cron: '15 11 * * 3'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
| # For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "master" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master" ]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"
| 1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./.github/workflows/node.js.yml | # This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Node.js CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [8.x, 10.x, 12.x, 14.x, 16.x, 18.x]
os: [ubuntu-latest, windows-latest]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm ci
- run: npm run build --if-present
- run: npm test
- run: npm run typings
- run: npm audit
env:
NODE_ENV: production
| # This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Node.js CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
workflow_dispatch: {}
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [8.x, 10.x, 12.x, 14.x, 16.x, 18.x]
os: [ubuntu-latest, windows-latest]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm ci
- run: npm run build --if-present
- run: npm test
- run: npm run typings
| 1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./.github/workflows/npm-publish.yml | # This workflow will run tests using node and then publish a package to GitHub Packages when a milestone is closed
# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages
name: Node.js Package
on:
milestone:
types: [closed]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- run: npm ci
- run: npm test
- run: npm run typings
publish-npm:
needs: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
registry-url: https://registry.npmjs.org/
- run: npm ci
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- run: npm version ${{ github.event.milestone.title }}
- run: git push && git push --tags
- run: npm publish
env:
NODE_AUTH_TOKEN: ${{secrets.npm_token}}
| # This workflow will run tests using node and then publish a package to GitHub Packages when a milestone is closed
# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages
name: Node.js Package
on:
milestone:
types: [closed]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
- run: npm ci
- run: npm test
- run: npm run typings
publish-npm:
needs: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
registry-url: https://registry.npmjs.org/
- run: npm ci
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- run: npm version ${{ github.event.milestone.title }}
- run: git push && git push --tags
- run: npm publish
env:
NODE_AUTH_TOKEN: ${{secrets.npm_token}}
| 1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./docs/_config.yml | theme: jekyll-theme-minimal
repository: nomiddlename/log4js-node
| theme: jekyll-theme-minimal
repository: nomiddlename/log4js-node
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./lib/appenders/multiprocess.js | const debug = require('debug')('log4js:multiprocess');
const net = require('net');
const LoggingEvent = require('../LoggingEvent');
const END_MSG = '__LOG4JS__';
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config, actualAppender, levels) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
debug('(master) deserialising log event');
const loggingEvent = LoggingEvent.deserialise(msg);
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
const server = net.createServer((clientSocket) => {
debug('(master) connection received');
clientSocket.setEncoding('utf8');
let logMessage = '';
function logTheMessage(msg) {
debug('(master) deserialising log event and sending to actual appender');
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
function chunkReceived(chunk) {
debug('(master) chunk of data received');
let event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.slice(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.slice(event.length + END_MSG.length) || '';
// check for more, maybe it was a big chunk
chunkReceived();
}
}
function handleError(error) {
const loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: levels.ERROR,
data: ['A worker log process hung up unexpectedly', error],
remoteAddress: clientSocket.remoteAddress,
remotePort: clientSocket.remotePort,
};
actualAppender(loggingEvent);
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
clientSocket.on('error', handleError);
});
server.listen(
config.loggerPort || 5000,
config.loggerHost || 'localhost',
(e) => {
debug('(master) master server listening, error was ', e);
// allow the process to exit, if this is the only socket active
server.unref();
}
);
function app(event) {
debug('(master) log event sent directly to actual appender (local event)');
return actualAppender(event);
}
app.shutdown = function(cb) {
debug('(master) master shutdown called, closing server');
server.close(cb);
};
return app;
}
function workerAppender(config) {
let canWrite = false;
const buffer = [];
let socket;
let shutdownAttempts = 3;
function write(loggingEvent) {
debug('(worker) Writing log event to socket');
socket.write(loggingEvent.serialise(), 'utf8');
socket.write(END_MSG, 'utf8');
}
function emptyBuffer() {
let evt;
debug('(worker) emptying worker buffer');
while ((evt = buffer.shift())) {
write(evt);
}
}
function createSocket() {
debug(
`(worker) worker appender creating socket to ${config.loggerHost ||
'localhost'}:${config.loggerPort || 5000}`
);
socket = net.createConnection(
config.loggerPort || 5000,
config.loggerHost || 'localhost'
);
socket.on('connect', () => {
debug('(worker) worker socket connected');
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
socket.on('error', (e) => {
debug('connection error', e);
canWrite = false;
emptyBuffer();
});
socket.on('close', createSocket);
}
createSocket();
function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
debug(
'(worker) worker buffering log event because it cannot write at the moment'
);
buffer.push(loggingEvent);
}
}
log.shutdown = function(cb) {
debug('(worker) worker shutdown called');
if (buffer.length && shutdownAttempts) {
debug('(worker) worker buffer has items, waiting 100ms to empty');
shutdownAttempts -= 1;
setTimeout(() => {
log.shutdown(cb);
}, 100);
} else {
socket.removeAllListeners('close');
socket.end(cb);
}
};
return log;
}
function createAppender(config, appender, levels) {
if (config.mode === 'master') {
debug('Creating master appender');
return logServer(config, appender, levels);
}
debug('Creating worker appender');
return workerAppender(config);
}
function configure(config, layouts, findAppender, levels) {
let appender;
debug(`configure with mode = ${config.mode}`);
if (config.mode === 'master') {
if (!config.appender) {
debug(`no appender found in config ${config}`);
throw new Error('multiprocess master must have an "appender" defined');
}
debug(`actual appender is ${config.appender}`);
appender = findAppender(config.appender);
if (!appender) {
debug(`actual appender "${config.appender}" not found`);
throw new Error(
`multiprocess master appender "${config.appender}" not defined`
);
}
}
return createAppender(config, appender, levels);
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:multiprocess');
const net = require('net');
const LoggingEvent = require('../LoggingEvent');
const END_MSG = '__LOG4JS__';
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config, actualAppender, levels) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
debug('(master) deserialising log event');
const loggingEvent = LoggingEvent.deserialise(msg);
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
const server = net.createServer((clientSocket) => {
debug('(master) connection received');
clientSocket.setEncoding('utf8');
let logMessage = '';
function logTheMessage(msg) {
debug('(master) deserialising log event and sending to actual appender');
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
function chunkReceived(chunk) {
debug('(master) chunk of data received');
let event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.slice(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.slice(event.length + END_MSG.length) || '';
// check for more, maybe it was a big chunk
chunkReceived();
}
}
function handleError(error) {
const loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: levels.ERROR,
data: ['A worker log process hung up unexpectedly', error],
remoteAddress: clientSocket.remoteAddress,
remotePort: clientSocket.remotePort,
};
actualAppender(loggingEvent);
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
clientSocket.on('error', handleError);
});
server.listen(
config.loggerPort || 5000,
config.loggerHost || 'localhost',
(e) => {
debug('(master) master server listening, error was ', e);
// allow the process to exit, if this is the only socket active
server.unref();
}
);
function app(event) {
debug('(master) log event sent directly to actual appender (local event)');
return actualAppender(event);
}
app.shutdown = function(cb) {
debug('(master) master shutdown called, closing server');
server.close(cb);
};
return app;
}
function workerAppender(config) {
let canWrite = false;
const buffer = [];
let socket;
let shutdownAttempts = 3;
function write(loggingEvent) {
debug('(worker) Writing log event to socket');
socket.write(loggingEvent.serialise(), 'utf8');
socket.write(END_MSG, 'utf8');
}
function emptyBuffer() {
let evt;
debug('(worker) emptying worker buffer');
while ((evt = buffer.shift())) {
write(evt);
}
}
function createSocket() {
debug(
`(worker) worker appender creating socket to ${config.loggerHost ||
'localhost'}:${config.loggerPort || 5000}`
);
socket = net.createConnection(
config.loggerPort || 5000,
config.loggerHost || 'localhost'
);
socket.on('connect', () => {
debug('(worker) worker socket connected');
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
socket.on('error', (e) => {
debug('connection error', e);
canWrite = false;
emptyBuffer();
});
socket.on('close', createSocket);
}
createSocket();
function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
debug(
'(worker) worker buffering log event because it cannot write at the moment'
);
buffer.push(loggingEvent);
}
}
log.shutdown = function(cb) {
debug('(worker) worker shutdown called');
if (buffer.length && shutdownAttempts) {
debug('(worker) worker buffer has items, waiting 100ms to empty');
shutdownAttempts -= 1;
setTimeout(() => {
log.shutdown(cb);
}, 100);
} else {
socket.removeAllListeners('close');
socket.end(cb);
}
};
return log;
}
function createAppender(config, appender, levels) {
if (config.mode === 'master') {
debug('Creating master appender');
return logServer(config, appender, levels);
}
debug('Creating worker appender');
return workerAppender(config);
}
function configure(config, layouts, findAppender, levels) {
let appender;
debug(`configure with mode = ${config.mode}`);
if (config.mode === 'master') {
if (!config.appender) {
debug(`no appender found in config ${config}`);
throw new Error('multiprocess master must have an "appender" defined');
}
debug(`actual appender is ${config.appender}`);
appender = findAppender(config.appender);
if (!appender) {
debug(`actual appender "${config.appender}" not found`);
throw new Error(
`multiprocess master appender "${config.appender}" not defined`
);
}
}
return createAppender(config, appender, levels);
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./test/tap/logging-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function(cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function() {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function(evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function(cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function() {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function(evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./test/tap/noLogFilter-test.js | const { test } = require('tap');
const log4js = require('../../lib/log4js');
const recording = require('../../lib/appenders/recording');
/**
* test a simple regexp
*/
test('log4js noLogFilter', (batch) => {
batch.beforeEach((done) => {
recording.reset();
if (typeof done === 'function') {
done();
}
});
batch.test(
'appender should exclude events that match the regexp string',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: 'This.*not',
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug(
'Another case that not match the regex, so it should get logged'
);
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(
logEvents[1].data[0],
'Another case that not match the regex, so it should get logged'
);
t.end();
}
);
/**
* test an array of regexp
*/
batch.test(
'appender should exclude events that match the regexp string contained in the array',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['This.*not', 'instead'],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug(
'Another case that not match the regex, so it should get logged'
);
logger.debug('This case instead it should get logged');
logger.debug('The last that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 3);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(
logEvents[1].data[0],
'Another case that not match the regex, so it should get logged'
);
t.equal(logEvents[2].data[0], 'The last that should get logged');
t.end();
}
);
/**
* test case insentitive regexp
*/
batch.test(
'appender should evaluate the regexp using incase sentitive option',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', 'eX.*de'],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug('Exclude this string');
logger.debug('Include this string');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Include this string');
t.end();
}
);
/**
* test empty string or null regexp
*/
batch.test(
'appender should skip the match in case of empty or null regexp',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['', null, undefined],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('Another string that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Another string that should get logged');
t.end();
}
);
/**
* test for excluding all the events that contains digits
*/
batch.test('appender should exclude the events that contains digits', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: '\\d',
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('The 2nd event should not get logged');
logger.debug('The 3rd event should not get logged, such as the 2nd');
const logEvents = recording.replay();
t.equal(logEvents.length, 1);
t.equal(logEvents[0].data[0], 'This should get logged');
t.end();
});
/**
* test the cases provided in the documentation
* https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
batch.test(
'appender should exclude not valid events according to the documentation',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', '\\d', ''],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
logger.debug('A 2nd message that will be excluded in all-the-logs.log');
logger.debug('Hello again');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'I will be logged in all-the-logs.log');
t.equal(logEvents[1].data[0], 'Hello again');
t.end();
}
);
batch.end();
});
| const { test } = require('tap');
const log4js = require('../../lib/log4js');
const recording = require('../../lib/appenders/recording');
/**
* test a simple regexp
*/
test('log4js noLogFilter', (batch) => {
batch.beforeEach((done) => {
recording.reset();
if (typeof done === 'function') {
done();
}
});
batch.test(
'appender should exclude events that match the regexp string',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: 'This.*not',
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug(
'Another case that not match the regex, so it should get logged'
);
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(
logEvents[1].data[0],
'Another case that not match the regex, so it should get logged'
);
t.end();
}
);
/**
* test an array of regexp
*/
batch.test(
'appender should exclude events that match the regexp string contained in the array',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['This.*not', 'instead'],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug(
'Another case that not match the regex, so it should get logged'
);
logger.debug('This case instead it should get logged');
logger.debug('The last that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 3);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(
logEvents[1].data[0],
'Another case that not match the regex, so it should get logged'
);
t.equal(logEvents[2].data[0], 'The last that should get logged');
t.end();
}
);
/**
* test case insentitive regexp
*/
batch.test(
'appender should evaluate the regexp using incase sentitive option',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', 'eX.*de'],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug('Exclude this string');
logger.debug('Include this string');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Include this string');
t.end();
}
);
/**
* test empty string or null regexp
*/
batch.test(
'appender should skip the match in case of empty or null regexp',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['', null, undefined],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('Another string that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Another string that should get logged');
t.end();
}
);
/**
* test for excluding all the events that contains digits
*/
batch.test('appender should exclude the events that contains digits', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: '\\d',
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('The 2nd event should not get logged');
logger.debug('The 3rd event should not get logged, such as the 2nd');
const logEvents = recording.replay();
t.equal(logEvents.length, 1);
t.equal(logEvents[0].data[0], 'This should get logged');
t.end();
});
/**
* test the cases provided in the documentation
* https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
batch.test(
'appender should exclude not valid events according to the documentation',
(t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', '\\d', ''],
appender: 'recorder',
},
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
});
const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
logger.debug('A 2nd message that will be excluded in all-the-logs.log');
logger.debug('Hello again');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'I will be logged in all-the-logs.log');
t.equal(logEvents[1].data[0], 'Hello again');
t.end();
}
);
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./.git/hooks/pre-rebase.sample | #!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END
| #!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./docs/console.md | # Console Appender
This appender uses node's console object to write log events. It can also be used in the browser, if you're using browserify or something similar. Be aware that writing a high volume of output to the console can make your application use a lot of memory. If you experience this problem, try switching to the [stdout](stdout.md) appender.
# Configuration
- `type` - `console`
- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
Note that all log events are output using `console.log` regardless of the event's level (so `ERROR` events will not be logged using `console.error`).
# Example
```javascript
log4js.configure({
appenders: { console: { type: "console" } },
categories: { default: { appenders: ["console"], level: "info" } },
});
```
| # Console Appender
This appender uses node's console object to write log events. It can also be used in the browser, if you're using browserify or something similar. Be aware that writing a high volume of output to the console can make your application use a lot of memory. If you experience this problem, try switching to the [stdout](stdout.md) appender.
# Configuration
- `type` - `console`
- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
Note that all log events are output using `console.log` regardless of the event's level (so `ERROR` events will not be logged using `console.error`).
# Example
```javascript
log4js.configure({
appenders: { console: { type: "console" } },
categories: { default: { appenders: ["console"], level: "info" } },
});
```
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./lib/clustering.js | const debug = require('debug')('log4js:clustering');
const LoggingEvent = require('./LoggingEvent');
const configuration = require('./configuration');
let disabled = false;
let cluster = null;
try {
// eslint-disable-next-line global-require
cluster = require('cluster');
} catch (e) {
debug('cluster module not present');
disabled = true;
}
const listeners = [];
let pm2 = false;
let pm2InstanceVar = 'NODE_APP_INSTANCE';
const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0';
const isMaster = () =>
disabled || (cluster && cluster.isMaster) || isPM2Master();
const sendToListeners = (logEvent) => {
listeners.forEach((l) => l(logEvent));
};
// in a multi-process node environment, worker loggers will use
// process.send
const receiver = (worker, message) => {
// prior to node v6, the worker parameter was not passed (args were message, handle)
debug('cluster message received from worker ', worker, ': ', message);
if (worker.topic && worker.data) {
message = worker;
worker = undefined;
}
if (message && message.topic && message.topic === 'log4js:message') {
debug('received message: ', message.data);
const logEvent = LoggingEvent.deserialise(message.data);
sendToListeners(logEvent);
}
};
if (!disabled) {
configuration.addListener((config) => {
// clear out the listeners, because configure has been called.
listeners.length = 0;
({
pm2,
disableClustering: disabled,
pm2InstanceVar = 'NODE_APP_INSTANCE',
} = config);
debug(`clustering disabled ? ${disabled}`);
debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`);
debug(`pm2 enabled ? ${pm2}`);
debug(`pm2InstanceVar = ${pm2InstanceVar}`);
debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`);
// just in case configure is called after shutdown
if (pm2) {
process.removeListener('message', receiver);
}
if (cluster && cluster.removeListener) {
cluster.removeListener('message', receiver);
}
if (disabled || config.disableClustering) {
debug('Not listening for cluster messages, because clustering disabled.');
} else if (isPM2Master()) {
// PM2 cluster support
// PM2 runs everything as workers - install pm2-intercom for this to work.
// we only want one of the app instances to write logs
debug('listening for PM2 broadcast messages');
process.on('message', receiver);
} else if (cluster && cluster.isMaster) {
debug('listening for cluster messages');
cluster.on('message', receiver);
} else {
debug('not listening for messages, because we are not a master process');
}
});
}
module.exports = {
onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster),
isMaster,
send: (msg) => {
if (isMaster()) {
sendToListeners(msg);
} else {
if (!pm2) {
msg.cluster = {
workerId: cluster.worker.id,
worker: process.pid,
};
}
process.send({ topic: 'log4js:message', data: msg.serialise() });
}
},
onMessage: (listener) => {
listeners.push(listener);
},
};
| const debug = require('debug')('log4js:clustering');
const LoggingEvent = require('./LoggingEvent');
const configuration = require('./configuration');
// Module-level clustering state, (re)populated on each configure() call.
let disabled = false;
let cluster = null;
try {
  // eslint-disable-next-line global-require
  cluster = require('cluster');
} catch (e) {
  // no cluster module (e.g. stripped-down runtime): fall back to
  // single-process behaviour
  debug('cluster module not present');
  disabled = true;
}
// callbacks that receive every log event routed to this process
const listeners = [];
let pm2 = false;
let pm2InstanceVar = 'NODE_APP_INSTANCE';
// under pm2, instance '0' is treated as the log-writing "master"
const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0';
const isMaster = () =>
  disabled || (cluster && cluster.isMaster) || isPM2Master();
const sendToListeners = (logEvent) => {
  listeners.forEach((l) => l(logEvent));
};
// In a multi-process node environment, worker loggers forward their events
// to the master via process.send; this callback receives them.
const receiver = (worker, message) => {
  debug('cluster message received from worker ', worker, ': ', message);
  // Prior to node v6 the worker parameter was not passed (the arguments
  // were message, handle) — detect that shape and shift the arguments.
  if (worker.topic && worker.data) {
    message = worker;
    worker = undefined;
  }
  const isLogMessage =
    message && message.topic && message.topic === 'log4js:message';
  if (isLogMessage) {
    debug('received message: ', message.data);
    sendToListeners(LoggingEvent.deserialise(message.data));
  }
};
// (Re)wire the clustering message receiver every time log4js is configured.
// Skipped entirely when the `cluster` module could not be loaded.
if (!disabled) {
  configuration.addListener((config) => {
    // clear out the listeners, because configure has been called.
    listeners.length = 0;
    // pull clustering options off the config into module-level state
    ({
      pm2,
      disableClustering: disabled,
      pm2InstanceVar = 'NODE_APP_INSTANCE',
    } = config);
    debug(`clustering disabled ? ${disabled}`);
    debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`);
    debug(`pm2 enabled ? ${pm2}`);
    debug(`pm2InstanceVar = ${pm2InstanceVar}`);
    debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`);
    // just in case configure is called after shutdown
    if (pm2) {
      process.removeListener('message', receiver);
    }
    if (cluster && cluster.removeListener) {
      cluster.removeListener('message', receiver);
    }
    // decide which transport (if any) should feed log events to this process
    if (disabled || config.disableClustering) {
      debug('Not listening for cluster messages, because clustering disabled.');
    } else if (isPM2Master()) {
      // PM2 cluster support
      // PM2 runs everything as workers - install pm2-intercom for this to work.
      // we only want one of the app instances to write logs
      debug('listening for PM2 broadcast messages');
      process.on('message', receiver);
    } else if (cluster && cluster.isMaster) {
      debug('listening for cluster messages');
      cluster.on('message', receiver);
    } else {
      debug('not listening for messages, because we are not a master process');
    }
  });
}
module.exports = {
  // Run `fn` only when this process is the master; workers get `notMaster`.
  onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster),
  isMaster,
  // Deliver a logging event: directly to listeners on the master, or
  // serialised over IPC (tagged 'log4js:message') when in a worker.
  send: (msg) => {
    if (isMaster()) {
      sendToListeners(msg);
    } else {
      // attach worker identity so the master can tell events apart
      // (pm2 workers are identified via pm2-intercom instead)
      if (!pm2) {
        msg.cluster = {
          workerId: cluster.worker.id,
          worker: process.pid,
        };
      }
      process.send({ topic: 'log4js:message', data: msg.serialise() });
    }
  },
  // Register a callback to receive every log event seen by this process.
  onMessage: (listener) => {
    listeners.push(listener);
  },
};
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./examples/log-rolling.js | const log4js = require('../lib/log4js');
// Demo of size-based log rolling: every event goes to stdout and to
// tmp-test.log, which rolls at 1 KB keeping 3 backups.
log4js.configure({
  appenders: {
    console: {
      type: 'console',
    },
    file: {
      type: 'file',
      filename: 'tmp-test.log',
      maxLogSize: 1024,
      backups: 3,
    },
  },
  categories: {
    default: { appenders: ['console', 'file'], level: 'info' },
  },
});
const log = log4js.getLogger('test');
// Emit a single numbered log line.
function doTheLogging(x) {
  log.info('Logging something %d', x);
}
let i = 0;
// 5000 lines are plenty to force several rolls of the 1 KB file.
for (; i < 5000; i += 1) {
  doTheLogging(i);
}
| const log4js = require('../lib/log4js');
// Demo of size-based log rolling: every event goes to stdout and to
// tmp-test.log, which rolls at 1 KB keeping 3 backups.
log4js.configure({
  appenders: {
    console: {
      type: 'console',
    },
    file: {
      type: 'file',
      filename: 'tmp-test.log',
      maxLogSize: 1024,
      backups: 3,
    },
  },
  categories: {
    default: { appenders: ['console', 'file'], level: 'info' },
  },
});
const log = log4js.getLogger('test');
// Emit a single numbered log line.
function doTheLogging(x) {
  log.info('Logging something %d', x);
}
// 5000 lines are plenty to force several rolls of the 1 KB file.
let i = 0;
while (i < 5000) {
  doTheLogging(i);
  i += 1;
}
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./examples/example-socket.js | const log4js = require('../lib/log4js');
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;
// Master/worker demo of the multiprocess appender: workers ship their log
// events over a socket to the master, which writes them to its console.
let i = 0;
if (cluster.isMaster) {
  log4js.configure({
    appenders: {
      console: { type: 'console' },
      master: {
        type: 'multiprocess',
        mode: 'master',
        appender: 'console',
      },
    },
    categories: {
      default: { appenders: ['console'], level: 'info' },
    },
  });
  console.info('Master creating %d workers', numCPUs);
  for (i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  // 'exit' is the event the cluster module emits when a worker dies
  // ('death' was removed from the API in node 0.8 and never fires), and a
  // worker's pid lives on worker.process.pid.
  cluster.on('exit', (worker) => {
    console.info('Worker %d died.', worker.process.pid);
  });
} else {
  log4js.configure({
    appenders: {
      worker: { type: 'multiprocess', mode: 'worker' },
    },
    categories: {
      default: { appenders: ['worker'], level: 'info' },
    },
  });
  const logger = log4js.getLogger('example-socket');
  console.info('Worker %d started.', process.pid);
  for (i = 0; i < 1000; i++) {
    logger.info('Worker %d - logging something %d', process.pid, i);
  }
  // flush the socket appender before letting the worker exit
  log4js.shutdown(() => {
    process.exit();
  });
}
| const log4js = require('../lib/log4js');
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;
// Master/worker demo of the multiprocess appender: workers ship their log
// events over a socket to the master, which writes them to its console.
let i = 0;
if (cluster.isMaster) {
  log4js.configure({
    appenders: {
      console: { type: 'console' },
      master: {
        type: 'multiprocess',
        mode: 'master',
        appender: 'console',
      },
    },
    categories: {
      default: { appenders: ['console'], level: 'info' },
    },
  });
  console.info('Master creating %d workers', numCPUs);
  for (i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  // NOTE(review): 'death' is not an event in the modern cluster API
  // ('exit' is), and pids are exposed via worker.process.pid — confirm
  // which node versions this example is meant to target.
  cluster.on('death', (worker) => {
    console.info('Worker %d died.', worker.pid);
  });
} else {
  log4js.configure({
    appenders: {
      worker: { type: 'multiprocess', mode: 'worker' },
    },
    categories: {
      default: { appenders: ['worker'], level: 'info' },
    },
  });
  const logger = log4js.getLogger('example-socket');
  console.info('Worker %d started.', process.pid);
  for (i = 0; i < 1000; i++) {
    logger.info('Worker %d - logging something %d', process.pid, i);
  }
  // flush the socket appender before letting the worker exit
  log4js.shutdown(() => {
    process.exit();
  });
}
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./examples/pm2.js | const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
// pm2: true tells log4js that instance INSTANCE_ID === '0' is the
// designated log writer; other instances forward events to it.
log4js.configure({
  appenders: {
    out: { type: 'file', filename: 'pm2logs.log' },
  },
  categories: {
    default: { appenders: ['out'], level: 'info' },
  },
  pm2: true,
  pm2InstanceVar: 'INSTANCE_ID',
});
const logger = log4js.getLogger('app');
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
  log4js.shutdown(() => {
    console.error('All done, shutdown cb returned.');
  });
}, 5000);
| const log4js = require('../lib/log4js');
// NOTE: for PM2 support to work you'll need to install the pm2-intercom module
// `pm2 install pm2-intercom`
// pm2: true tells log4js that instance INSTANCE_ID === '0' is the
// designated log writer; other instances forward events to it.
log4js.configure({
  appenders: {
    out: { type: 'file', filename: 'pm2logs.log' },
  },
  categories: {
    default: { appenders: ['out'], level: 'info' },
  },
  pm2: true,
  pm2InstanceVar: 'INSTANCE_ID',
});
const logger = log4js.getLogger('app');
// the same message four times, then a final marker line
for (let n = 0; n < 4; n += 1) {
  logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID);
}
logger.info('last bubbles', process.env.INSTANCE_ID);
// give pm2 time to set everything up, before we tear it down
setTimeout(() => {
  log4js.shutdown(() => {
    console.error('All done, shutdown cb returned.');
  });
}, 5000);
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./lib/appenders/fileSync.js | const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
// Ensure `file`'s parent directory exists, then open/append the file once so
// that permission, flag and path problems surface immediately.
function touchFile(file, options) {
  // Recursively create `dir`, tolerating the cases where it already exists.
  function ensureDir(dir) {
    try {
      return fs.mkdirSync(dir, { recursive: true });
    } catch (e) {
      // backward-compatible path for nodejs pre-10.12.0, which lacks the
      // { recursive } option: build the parent chain ourselves
      if (e.code === 'ENOENT') {
        ensureDir(path.dirname(dir));
        return ensureDir(dir);
      }
      // anything other than EEXIST / EROFS (read-only filesystem) is fatal
      if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
        throw e;
      }
      // EEXIST / EROFS are tolerated only when the path is an existing
      // directory; a plain file (or a stat failure) re-raises the original
      try {
        if (fs.statSync(dir).isDirectory()) {
          return dir;
        }
      } catch (err) {
        throw e;
      }
      throw e;
    }
  }
  ensureDir(path.dirname(file));
  // try to throw EISDIR, EROFS, EACCES early
  fs.appendFileSync(file, '', { mode: options.mode, flag: options.flags });
}
/**
 * Synchronous size-based rolling file writer.
 * Appends chunks to `filename` and, once `maxLogSize` bytes have
 * accumulated, shifts file.n -> file.(n+1) keeping at most `backups` files.
 */
class RollingFileSync {
  /**
   * @param filename path of the active log file
   * @param maxLogSize roll once the file reaches this many bytes
   * @param backups number of rolled files to keep; 0 truncates in place
   * @param options mode/flags forwarded to touchFile
   */
  constructor(filename, maxLogSize, backups, options) {
    debug('In RollingFileStream');
    if (maxLogSize < 0) {
      throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
    }
    this.filename = filename;
    this.size = maxLogSize;
    this.backups = backups;
    this.options = options;
    this.currentSize = 0;
    // Start from the existing file size so rolling carries on correctly
    // across restarts; creates the file when it does not exist yet.
    function currentFileSize(file) {
      let fileSize = 0;
      try {
        fileSize = fs.statSync(file).size;
      } catch (e) {
        // file does not exist
        touchFile(file, options);
      }
      return fileSize;
    }
    this.currentSize = currentFileSize(this.filename);
  }

  // true once the accumulated bytes reach the configured limit
  shouldRoll() {
    debug(
      'should roll with current size %d, and max size %d',
      this.currentSize,
      this.size
    );
    return this.currentSize >= this.size;
  }

  // Shift every existing backup up one index (highest index first) and free
  // the base filename for the next write.
  roll(filename) {
    const that = this;
    const nameMatcher = new RegExp(`^${path.basename(filename)}`);
    function justTheseFiles(item) {
      return nameMatcher.test(item);
    }
    function index(filename_) {
      return (
        parseInt(filename_.slice(`${path.basename(filename)}.`.length), 10) || 0
      );
    }
    function byIndex(a, b) {
      return index(a) - index(b);
    }
    function increaseFileIndex(fileToRename) {
      const idx = index(fileToRename);
      debug(`Index of ${fileToRename} is ${idx}`);
      if (that.backups === 0) {
        fs.truncateSync(filename, 0);
      } else if (idx < that.backups) {
        // on windows, you can get a EEXIST error if you rename a file to an existing file
        // so, we'll try to delete the file we're renaming to first
        // (fixed: the targets used the corrupted literal `$(unknown)` instead
        // of the interpolated ${filename})
        try {
          fs.unlinkSync(`${filename}.${idx + 1}`);
        } catch (e) {
          // ignore err: if we could not delete, it's most likely that it doesn't exist
        }
        debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
        fs.renameSync(
          path.join(path.dirname(filename), fileToRename),
          `${filename}.${idx + 1}`
        );
      }
    }
    function renameTheFiles() {
      // roll the backups (rename file.n to file.n+1, where n <= numBackups)
      debug('Renaming the old files');
      const files = fs.readdirSync(path.dirname(filename));
      files
        .filter(justTheseFiles)
        .sort(byIndex)
        .reverse()
        .forEach(increaseFileIndex);
    }
    debug('Rolling, rolling, rolling');
    renameTheFiles();
  }

  // eslint-disable-next-line no-unused-vars
  write(chunk, encoding) {
    const that = this;
    function writeTheChunk() {
      debug('writing the chunk to the file');
      that.currentSize += chunk.length;
      fs.appendFileSync(that.filename, chunk);
    }
    debug('in write');
    // roll before writing so a chunk never lands in an over-size file
    if (this.shouldRoll()) {
      this.currentSize = 0;
      this.roll(this.filename);
    }
    writeTheChunk();
  }
}
/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file the file log messages will be written to
 * @param layout a function that takes a logevent and returns a string
 * (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file,
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 * @returns {Function} appender function: (loggingEvent) => void
 * @throws {Error} when file is not a non-empty string or is a directory path
 */
function fileAppender(
  file,
  layout,
  logSize,
  numBackups,
  options,
  timezoneOffset
) {
  if (typeof file !== 'string' || file.length === 0) {
    throw new Error(`Invalid filename: ${file}`);
  } else if (file.endsWith(path.sep)) {
    throw new Error(`Filename is a directory: ${file}`);
  } else {
    // handle ~ expansion: https://github.com/nodejs/node/issues/684
    // exclude ~ and ~filename as these can be valid files
    file = file.replace(new RegExp(`^~(?=${path.sep}.+)`), os.homedir());
  }
  file = path.normalize(file);
  // default to 5 backups, but an explicit 0 is respected
  numBackups = !numBackups && numBackups !== 0 ? 5 : numBackups;
  debug(
    'Creating fileSync appender (',
    file,
    ', ',
    logSize,
    ', ',
    numBackups,
    ', ',
    options,
    ', ',
    timezoneOffset,
    ')'
  );
  // With a size limit we roll via RollingFileSync; otherwise a minimal
  // append-only writer (same write(data) shape) is enough.
  function openTheStream(filePath, fileSize, numFiles) {
    let stream;
    if (fileSize) {
      stream = new RollingFileSync(filePath, fileSize, numFiles, options);
    } else {
      stream = ((f) => {
        // touch the file to apply flags (like w to truncate the file)
        touchFile(f, options);
        return {
          write(data) {
            fs.appendFileSync(f, data);
          },
        };
      })(filePath);
    }
    return stream;
  }
  const logFile = openTheStream(file, logSize, numBackups);
  // the appender itself: format the event and append one line
  return (loggingEvent) => {
    logFile.write(layout(loggingEvent, timezoneOffset) + eol);
  };
}
/**
 * Build a fileSync appender from a log4js config object.
 * Defaults: basicLayout, append mode ('a'), utf8 encoding, 0o600 mode.
 */
function configure(config, layouts) {
  let layout = layouts.basicLayout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  const options = {
    flags: config.flags || 'a',
    encoding: config.encoding || 'utf8',
    mode: config.mode || 0o600,
  };
  return fileAppender(
    config.filename,
    layout,
    config.maxLogSize,
    config.backups,
    options,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
// Ensure `file`'s parent directory exists and that the file itself can be
// created/opened with the configured mode and flags.
function touchFile(file, options) {
  // attempt to create the directory
  const mkdir = (dir) => {
    try {
      return fs.mkdirSync(dir, { recursive: true });
    } catch (e) {
      // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
      // recursive creation of parent first
      if (e.code === 'ENOENT') {
        mkdir(path.dirname(dir));
        return mkdir(dir);
      }
      // throw error for all except EEXIST and EROFS (read-only filesystem)
      if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
        throw e;
      }
      // EEXIST: throw if file and not directory
      // EROFS : throw if directory not found
      else {
        try {
          if (fs.statSync(dir).isDirectory()) {
            return dir;
          }
          throw e;
        } catch (err) {
          throw e;
        }
      }
    }
  };
  mkdir(path.dirname(file));
  // try to throw EISDIR, EROFS, EACCES
  fs.appendFileSync(file, '', { mode: options.mode, flag: options.flags });
}
/**
 * Synchronous size-based rolling file writer.
 * Appends chunks to `filename` and, once `maxLogSize` bytes have
 * accumulated, shifts file.n -> file.(n+1) keeping at most `backups` files.
 */
class RollingFileSync {
  /**
   * @param filename path of the active log file
   * @param maxLogSize roll once the file reaches this many bytes
   * @param backups number of rolled files to keep; 0 truncates in place
   * @param options mode/flags forwarded to touchFile
   */
  constructor(filename, maxLogSize, backups, options) {
    debug('In RollingFileStream');
    if (maxLogSize < 0) {
      throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
    }
    this.filename = filename;
    this.size = maxLogSize;
    this.backups = backups;
    this.options = options;
    this.currentSize = 0;
    // Start from the existing file size so rolling carries on correctly
    // across restarts; creates the file when it does not exist yet.
    function currentFileSize(file) {
      let fileSize = 0;
      try {
        fileSize = fs.statSync(file).size;
      } catch (e) {
        // file does not exist
        touchFile(file, options);
      }
      return fileSize;
    }
    this.currentSize = currentFileSize(this.filename);
  }

  // true once the accumulated bytes reach the configured limit
  shouldRoll() {
    debug(
      'should roll with current size %d, and max size %d',
      this.currentSize,
      this.size
    );
    return this.currentSize >= this.size;
  }

  // Shift every existing backup up one index (highest index first) and free
  // the base filename for the next write.
  roll(filename) {
    const that = this;
    const nameMatcher = new RegExp(`^${path.basename(filename)}`);
    function justTheseFiles(item) {
      return nameMatcher.test(item);
    }
    function index(filename_) {
      return (
        parseInt(filename_.slice(`${path.basename(filename)}.`.length), 10) || 0
      );
    }
    function byIndex(a, b) {
      return index(a) - index(b);
    }
    function increaseFileIndex(fileToRename) {
      const idx = index(fileToRename);
      debug(`Index of ${fileToRename} is ${idx}`);
      if (that.backups === 0) {
        fs.truncateSync(filename, 0);
      } else if (idx < that.backups) {
        // on windows, you can get a EEXIST error if you rename a file to an existing file
        // so, we'll try to delete the file we're renaming to first
        // (fixed: the targets used the corrupted literal `$(unknown)` instead
        // of the interpolated ${filename})
        try {
          fs.unlinkSync(`${filename}.${idx + 1}`);
        } catch (e) {
          // ignore err: if we could not delete, it's most likely that it doesn't exist
        }
        debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
        fs.renameSync(
          path.join(path.dirname(filename), fileToRename),
          `${filename}.${idx + 1}`
        );
      }
    }
    function renameTheFiles() {
      // roll the backups (rename file.n to file.n+1, where n <= numBackups)
      debug('Renaming the old files');
      const files = fs.readdirSync(path.dirname(filename));
      files
        .filter(justTheseFiles)
        .sort(byIndex)
        .reverse()
        .forEach(increaseFileIndex);
    }
    debug('Rolling, rolling, rolling');
    renameTheFiles();
  }

  // eslint-disable-next-line no-unused-vars
  write(chunk, encoding) {
    const that = this;
    function writeTheChunk() {
      debug('writing the chunk to the file');
      that.currentSize += chunk.length;
      fs.appendFileSync(that.filename, chunk);
    }
    debug('in write');
    // roll before writing so a chunk never lands in an over-size file
    if (this.shouldRoll()) {
      this.currentSize = 0;
      this.roll(this.filename);
    }
    writeTheChunk();
  }
}
/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file the file log messages will be written to
 * @param layout a function that takes a logevent and returns a string
 * (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file,
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 * @returns {Function} appender function: (loggingEvent) => void
 * @throws {Error} when file is not a non-empty string or is a directory path
 */
function fileAppender(
  file,
  layout,
  logSize,
  numBackups,
  options,
  timezoneOffset
) {
  if (typeof file !== 'string' || file.length === 0) {
    throw new Error(`Invalid filename: ${file}`);
  } else if (file.endsWith(path.sep)) {
    throw new Error(`Filename is a directory: ${file}`);
  } else {
    // handle ~ expansion: https://github.com/nodejs/node/issues/684
    // exclude ~ and ~filename as these can be valid files
    file = file.replace(new RegExp(`^~(?=${path.sep}.+)`), os.homedir());
  }
  file = path.normalize(file);
  // default to 5 backups, but an explicit 0 is respected
  numBackups = !numBackups && numBackups !== 0 ? 5 : numBackups;
  debug(
    'Creating fileSync appender (',
    file,
    ', ',
    logSize,
    ', ',
    numBackups,
    ', ',
    options,
    ', ',
    timezoneOffset,
    ')'
  );
  // With a size limit we roll via RollingFileSync; otherwise a minimal
  // append-only writer (same write(data) shape) is enough.
  function openTheStream(filePath, fileSize, numFiles) {
    let stream;
    if (fileSize) {
      stream = new RollingFileSync(filePath, fileSize, numFiles, options);
    } else {
      stream = ((f) => {
        // touch the file to apply flags (like w to truncate the file)
        touchFile(f, options);
        return {
          write(data) {
            fs.appendFileSync(f, data);
          },
        };
      })(filePath);
    }
    return stream;
  }
  const logFile = openTheStream(file, logSize, numBackups);
  // the appender itself: format the event and append one line
  return (loggingEvent) => {
    logFile.write(layout(loggingEvent, timezoneOffset) + eol);
  };
}
/**
 * Build a fileSync appender from a log4js config object.
 * Defaults: basicLayout, append mode ('a'), utf8 encoding, 0o600 mode.
 */
function configure(config, layouts) {
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  const options = {
    flags: config.flags || 'a',
    encoding: config.encoding || 'utf8',
    mode: config.mode || 0o600,
  };
  return fileAppender(
    config.filename,
    layout,
    config.maxLogSize,
    config.backups,
    options,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./docs/categories.md | # Categories
Categories are groups of log events. The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). Log events with the same _category_ will go to the same _appenders_.
## Default configuration
When defining your appenders through a configuration, at least one category must be defined.
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: {
out: { type: "stdout" },
app: { type: "file", filename: "application.log" },
},
categories: {
default: { appenders: ["out"], level: "trace" },
app: { appenders: ["app"], level: "trace" },
},
});
const logger = log4js.getLogger();
logger.trace("This will use the default category and go to stdout");
const logToFile = log4js.getLogger("app");
logToFile.trace("This will go to a file");
```
## Categories inheritance
Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'.
This behaviour can be disabled by setting inherit=false on the sub-category.
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: {
console: { type: "console" },
app: { type: "file", filename: "application.log" },
},
categories: {
default: { appenders: ["console"], level: "trace" },
catA: { appenders: ["console"], level: "error" },
"catA.catB": { appenders: ["app"], level: "trace" },
},
});
const loggerA = log4js.getLogger("catA");
loggerA.error("This will be written to console with log level ERROR");
loggerA.trace("This will not be written");
const loggerAB = log4js.getLogger("catA.catB");
loggerAB.error(
"This will be written with log level ERROR to console and to a file"
);
loggerAB.trace(
"This will be written with log level TRACE to console and to a file"
);
```
Two categories are defined:
- Log events with category 'catA' will go to appender 'console' only.
- Log events with category 'catA.catB' will go to appenders 'console' and 'app'.
Appenders will see and log an event only if the category level is less than or equal to the event's level.
| # Categories
Categories are groups of log events. The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). Log events with the same _category_ will go to the same _appenders_.
## Default configuration
When defining your appenders through a configuration, at least one category must be defined.
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: {
out: { type: "stdout" },
app: { type: "file", filename: "application.log" },
},
categories: {
default: { appenders: ["out"], level: "trace" },
app: { appenders: ["app"], level: "trace" },
},
});
const logger = log4js.getLogger();
logger.trace("This will use the default category and go to stdout");
const logToFile = log4js.getLogger("app");
logToFile.trace("This will go to a file");
```
## Categories inheritance
Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'.
This behaviour can be disabled by setting inherit=false on the sub-category.
```javascript
const log4js = require("log4js");
log4js.configure({
appenders: {
console: { type: "console" },
app: { type: "file", filename: "application.log" },
},
categories: {
default: { appenders: ["console"], level: "trace" },
catA: { appenders: ["console"], level: "error" },
"catA.catB": { appenders: ["app"], level: "trace" },
},
});
const loggerA = log4js.getLogger("catA");
loggerA.error("This will be written to console with log level ERROR");
loggerA.trace("This will not be written");
const loggerAB = log4js.getLogger("catA.catB");
loggerAB.error(
"This will be written with log level ERROR to console and to a file"
);
loggerAB.trace(
"This will be written with log level TRACE to console and to a file"
);
```
Two categories are defined:
- Log events with category 'catA' will go to appender 'console' only.
- Log events with category 'catA.catB' will go to appenders 'console' and 'app'.
Appenders will see and log an event only if the category level is less than or equal to the event's level.
| -1 |
|
log4js-node/log4js-node | 1,325 | ci: removed scheduled job from codeql and separated npm audit | lamweili | "2022-09-26T05:51:46Z" | "2022-09-26T05:59:10Z" | 084479c35183c9ed31484acdbc54cd34fd462684 | 447b949ebd7a82a4bd281a021a471cae1c27dc55 | ci: removed scheduled job from codeql and separated npm audit. | ./lib/layouts.js | const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
// ANSI SGR code pairs: [open, close] for each supported style/colour.
const styles = {
  // styles
  bold: [1, 22],
  italic: [3, 23],
  underline: [4, 24],
  inverse: [7, 27],
  // grayscale
  white: [37, 39],
  grey: [90, 39],
  black: [90, 39],
  // colors
  blue: [34, 39],
  cyan: [36, 39],
  green: [32, 39],
  magenta: [35, 39],
  red: [91, 39],
  yellow: [33, 39],
};
// Opening ANSI escape for `style`, or '' when no style is given.
function colorizeStart(style) {
  if (!style) {
    return '';
  }
  return `\x1B[${styles[style][0]}m`;
}
// Closing ANSI escape for `style`, or '' when no style is given.
function colorizeEnd(style) {
  if (!style) {
    return '';
  }
  return `\x1B[${styles[style][1]}m`;
}
/**
 * Taken from masylum's fork (https://github.com/masylum/log4js-node)
 * Wraps `str` in the opening/closing ANSI codes for `style`.
 */
function colorize(str, style) {
  return colorizeStart(style) + str + colorizeEnd(style);
}
// Renders the "[date] [LEVEL] category - " prefix used by the basic and
// coloured layouts, optionally wrapped in the given colour's ANSI codes.
function timestampLevelAndCategory(loggingEvent, colour) {
  return colorize(
    util.format(
      '[%s] [%s] %s - ',
      dateFormat.asString(loggingEvent.startTime),
      loggingEvent.level.toString(),
      loggingEvent.categoryName
    ),
    colour
  );
}
/**
 * BasicLayout is a simple layout for storing the logs. The logs are stored
 * in following format:
 * <pre>
 * [startTime] [logLevel] categoryName - message\n
 * </pre>
 *
 * @param loggingEvent - event whose data array is util.format-ed
 * @returns {string} formatted log line (no trailing newline is appended here)
 * @author Stephan Strittmatter
 */
function basicLayout(loggingEvent) {
  return (
    timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data)
  );
}
/**
 * colouredLayout - taken from masylum's fork.
 * same as basicLayout, but with colours: the prefix is wrapped in the
 * ANSI codes of the event level's colour.
 */
function colouredLayout(loggingEvent) {
  return (
    timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) +
    util.format(...loggingEvent.data)
  );
}
// Formats only the event's data (util.format semantics), with no
// timestamp/level/category prefix.
function messagePassThroughLayout(loggingEvent) {
  const { data } = loggingEvent;
  return util.format(...data);
}
// Returns the first data item untouched — useful when the caller has
// already produced the final message.
function dummyLayout(loggingEvent) {
  const [firstItem] = loggingEvent.data;
  return firstItem;
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
 *  - %d date in various formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
 *  - %o column position
* - %s call stack
* - %C class name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %M method or function name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %A method or function alias [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %F fully qualified caller name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflosCMAF%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits
.slice(loggerNameBits.length - precision)
.join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
'Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. ' +
'Please use %d{ABSOLUTETIME} instead.',
'DeprecationWarning',
'log4js-node-DEP0003'
);
debug(
'[log4js-node-DEP0003]',
'DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.'
);
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
'Pattern %d{DATE} is deprecated due to the confusion it causes when used. ' +
'Please use %d{DATETIME} instead.',
'DeprecationWarning',
'log4js-node-DEP0004'
);
debug(
'[log4js-node-DEP0004]',
'DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.'
);
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid
? loggingEvent.pid.toString()
: process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function'
? tokens[specifier](loggingEvent)
: tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
// support for ESM as it uses url instead of path for file
/* istanbul ignore next: unsure how to simulate ESM for test coverage */
const convertFileURLToPath = function(filepath) {
const urlPrefix = 'file://';
if (filepath.startsWith(urlPrefix)) {
// https://nodejs.org/api/url.html#urlfileurltopathurl
if (typeof url.fileURLToPath === 'function') {
filepath = url.fileURLToPath(filepath);
}
// backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
else {
// posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
// win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
// win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
filepath = path.normalize(
filepath.replace(new RegExp(`^${urlPrefix}`), '')
);
if (process.platform === 'win32') {
if (filepath.startsWith('\\')) {
filepath = filepath.slice(1);
} else {
filepath = path.sep + path.sep + filepath;
}
}
}
}
return filepath;
};
filename = convertFileURLToPath(filename);
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
function className(loggingEvent) {
return loggingEvent.className || '';
}
function functionName(loggingEvent) {
return loggingEvent.functionName || '';
}
function functionAlias(loggingEvent) {
return loggingEvent.functionAlias || '';
}
function callerName(loggingEvent) {
return loggingEvent.callerName || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack,
C: className,
M: functionName,
A: functionAlias,
F: callerName,
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function(loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(
conversionCharacter,
loggingEvent,
specifier
);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough() {
return messagePassThroughLayout;
},
basic() {
return basicLayout;
},
colored() {
return colouredLayout;
},
coloured() {
return colouredLayout;
},
pattern(config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy() {
return dummyLayout;
},
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout(name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout(name, config) {
return layoutMakers[name] && layoutMakers[name](config);
},
};
| const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
const styles = {
// styles
bold: [1, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
// grayscale
white: [37, 39],
grey: [90, 39],
black: [90, 39],
// colors
blue: [34, 39],
cyan: [36, 39],
green: [32, 39],
magenta: [35, 39],
red: [91, 39],
yellow: [33, 39],
};
function colorizeStart(style) {
return style ? `\x1B[${styles[style][0]}m` : '';
}
function colorizeEnd(style) {
return style ? `\x1B[${styles[style][1]}m` : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize(str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
return colorize(
util.format(
'[%s] [%s] %s - ',
dateFormat.asString(loggingEvent.startTime),
loggingEvent.level.toString(),
loggingEvent.categoryName
),
colour
);
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout(loggingEvent) {
return (
timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data)
);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return (
timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) +
util.format(...loggingEvent.data)
);
}
function messagePassThroughLayout(loggingEvent) {
return util.format(...loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %C class name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %M method or function name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %A method or function alias [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %F fully qualified caller name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflosCMAF%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
let loggerName = loggingEvent.categoryName;
if (specifier) {
const precision = parseInt(specifier, 10);
const loggerNameBits = loggerName.split('.');
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits
.slice(loggerNameBits.length - precision)
.join('.');
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
let format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
switch (format) {
case 'ISO8601':
case 'ISO8601_FORMAT':
format = dateFormat.ISO8601_FORMAT;
break;
case 'ISO8601_WITH_TZ_OFFSET':
case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
break;
case 'ABSOLUTE':
process.emitWarning(
'Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. ' +
'Please use %d{ABSOLUTETIME} instead.',
'DeprecationWarning',
'log4js-node-DEP0003'
);
debug(
'[log4js-node-DEP0003]',
'DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.'
);
// falls through
case 'ABSOLUTETIME':
case 'ABSOLUTETIME_FORMAT':
format = dateFormat.ABSOLUTETIME_FORMAT;
break;
case 'DATE':
process.emitWarning(
'Pattern %d{DATE} is deprecated due to the confusion it causes when used. ' +
'Please use %d{DATETIME} instead.',
'DeprecationWarning',
'log4js-node-DEP0004'
);
debug(
'[log4js-node-DEP0004]',
'DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.'
);
// falls through
case 'DATETIME':
case 'DATETIME_FORMAT':
format = dateFormat.DATETIME_FORMAT;
break;
// no default
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid
? loggingEvent.pid.toString()
: process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function'
? tokens[specifier](loggingEvent)
: tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
// support for ESM as it uses url instead of path for file
/* istanbul ignore next: unsure how to simulate ESM for test coverage */
const convertFileURLToPath = function(filepath) {
const urlPrefix = 'file://';
if (filepath.startsWith(urlPrefix)) {
// https://nodejs.org/api/url.html#urlfileurltopathurl
if (typeof url.fileURLToPath === 'function') {
filepath = url.fileURLToPath(filepath);
}
// backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
else {
// posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
// win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
// win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
filepath = path.normalize(
filepath.replace(new RegExp(`^${urlPrefix}`), '')
);
if (process.platform === 'win32') {
if (filepath.startsWith('\\')) {
filepath = filepath.slice(1);
} else {
filepath = path.sep + path.sep + filepath;
}
}
}
}
return filepath;
};
filename = convertFileURLToPath(filename);
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
function className(loggingEvent) {
return loggingEvent.className || '';
}
function functionName(loggingEvent) {
return loggingEvent.functionName || '';
}
function functionAlias(loggingEvent) {
return loggingEvent.functionAlias || '';
}
function callerName(loggingEvent) {
return loggingEvent.callerName || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack,
C: className,
M: functionName,
A: functionAlias,
F: callerName,
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function(loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(
conversionCharacter,
loggingEvent,
specifier
);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
const layoutMakers = {
messagePassThrough() {
return messagePassThroughLayout;
},
basic() {
return basicLayout;
},
colored() {
return colouredLayout;
},
coloured() {
return colouredLayout;
},
pattern(config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy() {
return dummyLayout;
},
};
module.exports = {
basicLayout,
messagePassThroughLayout,
patternLayout,
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout(name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout(name, config) {
return layoutMakers[name] && layoutMakers[name](config);
},
};
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/api.md | ## API
## configuration - `log4js.configure(object || string)`
There is one entry point for configuring log4js. A string argument is treated as a filename to load configuration from. Config files should be JSON, and contain a configuration object (see format below). You can also pass a configuration object directly to `configure`.
Configuration should take place immediately after requiring log4js for the first time in your application. If you do not call `configure`, log4js will use `LOG4JS_CONFIG` (if defined) or the default config. The default config defines one appender, which would log to stdout with the coloured layout, but also defines the default log level to be `OFF` - which means no logs will be output.
If you are using `cluster`, then include the call to `configure` in the worker processes as well as the master. That way the worker processes will pick up the right levels for your categories, and any custom levels you may have defined. Appenders will only be defined on the master process, so there is no danger of multiple processes attempting to write to the same appender. No special configuration is needed to use log4js with clusters, unlike previous versions.
Configuration objects must define at least one appender, and a default category. Log4js will throw an exception if the configuration is invalid.
`configure` method call returns the configured log4js object.
### Configuration Object
Properties:
- `levels` (optional, object) - used for defining custom log levels, or redefining existing ones; this is a map with the level name as the key (string, case insensitive), and an object as the value. The object should have two properties: the level value (integer) as the value, and the colour. Log levels are used to assign importance to log messages, with the integer value being used to sort them. If you do not specify anything in your configuration, the default values are used (ALL < TRACE < DEBUG < INFO < WARN < ERROR < FATAL < MARK < OFF - note that OFF is intended to be used to turn off logging, not as a level for actual logging, i.e. you would never call `logger.off('some log message')`). Levels defined here are used in addition to the default levels, with the integer value being used to determine their relation to the default levels. If you define a level with the same name as a default level, then the integer value in the config takes precedence. Level names must begin with a letter, and can only contain letters, numbers and underscores.
- `appenders` (object) - a map of named appenders (string) to appender definitions (object); appender definitions must have a property `type` (string) - other properties depend on the appender type.
- `categories` (object) - a map of named categories (string) to category definitions (object). You must define the `default` category which is used for all log events that do not match a specific category. Category definitions have two properties:
- `appenders` (array of strings) - the list of appender names to be used for this category. A category must have at least one appender.
- `level` (string, case insensitive) - the minimum log level that this category will send to the appenders. For example, if set to 'error' then the appenders will only receive log events of level 'error', 'fatal', 'mark' - log events of 'info', 'warn', 'debug', or 'trace' will be ignored.
- `enableCallStack` (boolean, optional, defaults to `false`) - setting this to `true` will make log events for this category use the call stack to generate line numbers and file names in the event. See [pattern layout](layouts.md) for how to output these values in your appenders.
- `pm2` (boolean) (optional) - set this to true if you're running your app using [pm2](http://pm2.keymetrics.io), otherwise logs will not work (you'll also need to install pm2-intercom as pm2 module: `pm2 install pm2-intercom`)
- `pm2InstanceVar` (string) (optional, defaults to 'NODE_APP_INSTANCE') - set this if you're using pm2 and have changed the default name of the NODE_APP_INSTANCE variable.
- `disableClustering` (boolean) (optional) - set this to true if you liked the way log4js used to just ignore clustered environments, or you're having trouble with PM2 logging. Each worker process will do its own logging. Be careful with this if you're logging to files, weirdness can occur.
## Loggers - `log4js.getLogger([category])`
This function takes a single optional string argument to denote the category to be used for log events on this logger. If no category is specified, the events will be routed to the appender for the `default` category. The function returns a `Logger` object which has its level set to the level specified for that category in the config and implements the following functions:
- `<level>(args...)` - where `<level>` can be any of the lower case names of the levels (including any custom levels defined). For example: `logger.info('some info')` will dispatch a log event with a level of info. If you're using the basic, coloured or message pass-through [layouts](layouts.md), the logged string will have its formatting (placeholders like `%s`, `%d`, etc) delegated to [util.format](https://nodejs.org/api/util.html#util_util_format_format_args).
- `is<level>Enabled()` - returns true if a log event of level <level> (camel case) would be dispatched to the appender defined for the logger's category. For example: `logger.isInfoEnabled()` will return true if the level for the logger is INFO or lower.
- `addContext(<key>,<value>)` - where `<key>` is a string, `<value>` can be anything. This stores a key-value pair that is added to all log events generated by the logger. Uses would be to add ids for tracking a user through your application. Currently only the `logFaces` appenders make use of the context values.
- `removeContext(<key>)` - removes a previously defined key-value pair from the context.
- `clearContext()` - removes all context pairs from the logger.
- `setParseCallStackFunction(function)` - Allow to override the default way to parse the callstack data for the layout pattern, a generic javascript Error object is passed to the function. Must return an object with properties : `functionName` / `fileName` / `lineNumber` / `columnNumber` / `callStack`. Can for example be used if all of your log call are made from one "debug" class and you would to "erase" this class from the callstack to only show the function which called your "debug" class.
The `Logger` object has the following properties:
- `level` - where `level` is a log4js level or a string that matches a level (e.g. 'info', 'INFO', etc). This allows overriding the configured level for this logger. Changing this value applies to all loggers of the same category.
- `useCallStack` - where `useCallStack` is a boolean to indicate if log events for this category use the call stack to generate line numbers and file names in the event. This allows overriding the configured useCallStack for this logger. Changing this value applies to all loggers of the same category.
## Shutdown - `log4js.shutdown(cb)`
`shutdown` accepts a callback that will be called when log4js has closed all appenders and finished writing log events. Use this when your programme exits to make sure all your logs are written to files, sockets are closed, etc.
## Custom Layouts - `log4js.addLayout(type, fn)`
This function is used to add user-defined layout functions. See [layouts](layouts.md) for more details and an example.
| ## API
## configuration - `log4js.configure(object || string)`
There is one entry point for configuring log4js. A string argument is treated as a filename to load configuration from. Config files should be JSON, and contain a configuration object (see format below). You can also pass a configuration object directly to `configure`.
Configuration should take place immediately after requiring log4js for the first time in your application. If you do not call `configure`, log4js will use `LOG4JS_CONFIG` (if defined) or the default config. The default config defines one appender, which would log to stdout with the coloured layout, but also defines the default log level to be `OFF` - which means no logs will be output.
If you are using `cluster`, then include the call to `configure` in the worker processes as well as the master. That way the worker processes will pick up the right levels for your categories, and any custom levels you may have defined. Appenders will only be defined on the master process, so there is no danger of multiple processes attempting to write to the same appender. No special configuration is needed to use log4js with clusters, unlike previous versions.
Configuration objects must define at least one appender, and a default category. Log4js will throw an exception if the configuration is invalid.
`configure` method call returns the configured log4js object.
### Configuration Object
Properties:
- `levels` (optional, object) - used for defining custom log levels, or redefining existing ones; this is a map with the level name as the key (string, case insensitive), and an object as the value. The object should have two properties: the level value (integer) as the value, and the colour. Log levels are used to assign importance to log messages, with the integer value being used to sort them. If you do not specify anything in your configuration, the default values are used (ALL < TRACE < DEBUG < INFO < WARN < ERROR < FATAL < MARK < OFF - note that OFF is intended to be used to turn off logging, not as a level for actual logging, i.e. you would never call `logger.off('some log message')`). Levels defined here are used in addition to the default levels, with the integer value being used to determine their relation to the default levels. If you define a level with the same name as a default level, then the integer value in the config takes precedence. Level names must begin with a letter, and can only contain letters, numbers and underscores.
- `appenders` (object) - a map of named appenders (string) to appender definitions (object); appender definitions must have a property `type` (string) - other properties depend on the appender type.
- `categories` (object) - a map of named categories (string) to category definitions (object). You must define the `default` category which is used for all log events that do not match a specific category. Category definitions have two properties:
- `appenders` (array of strings) - the list of appender names to be used for this category. A category must have at least one appender.
- `level` (string, case insensitive) - the minimum log level that this category will send to the appenders. For example, if set to 'error' then the appenders will only receive log events of level 'error', 'fatal', 'mark' - log events of 'info', 'warn', 'debug', or 'trace' will be ignored.
- `enableCallStack` (boolean, optional, defaults to `false`) - setting this to `true` will make log events for this category use the call stack to generate line numbers and file names in the event. See [pattern layout](layouts.md) for how to output these values in your appenders.
- `pm2` (boolean) (optional) - set this to true if you're running your app using [pm2](http://pm2.keymetrics.io), otherwise logs will not work (you'll also need to install pm2-intercom as pm2 module: `pm2 install pm2-intercom`)
- `pm2InstanceVar` (string) (optional, defaults to 'NODE_APP_INSTANCE') - set this if you're using pm2 and have changed the default name of the NODE_APP_INSTANCE variable.
- `disableClustering` (boolean) (optional) - set this to true if you liked the way log4js used to just ignore clustered environments, or you're having trouble with PM2 logging. Each worker process will do its own logging. Be careful with this if you're logging to files, weirdness can occur.
## Loggers - `log4js.getLogger([category])`
This function takes a single optional string argument to denote the category to be used for log events on this logger. If no category is specified, the events will be routed to the appender for the `default` category. The function returns a `Logger` object which has its level set to the level specified for that category in the config and implements the following functions:
- `<level>(args...)` - where `<level>` can be any of the lower case names of the levels (including any custom levels defined). For example: `logger.info('some info')` will dispatch a log event with a level of info. If you're using the basic, coloured or message pass-through [layouts](layouts.md), the logged string will have its formatting (placeholders like `%s`, `%d`, etc) delegated to [util.format](https://nodejs.org/api/util.html#util_util_format_format_args).
- `is<level>Enabled()` - returns true if a log event of level <level> (camel case) would be dispatched to the appender defined for the logger's category. For example: `logger.isInfoEnabled()` will return true if the level for the logger is INFO or lower.
- `addContext(<key>,<value>)` - where `<key>` is a string, `<value>` can be anything. This stores a key-value pair that is added to all log events generated by the logger. Uses would be to add ids for tracking a user through your application. Currently only the `logFaces` appenders make use of the context values.
- `removeContext(<key>)` - removes a previously defined key-value pair from the context.
- `clearContext()` - removes all context pairs from the logger.
- `setParseCallStackFunction(function)` - Allow to override the default way to parse the callstack data for the layout pattern, a generic javascript Error object is passed to the function. Must return an object with properties : `functionName` / `fileName` / `lineNumber` / `columnNumber` / `callStack`. Can for example be used if all of your log call are made from one "debug" class and you would to "erase" this class from the callstack to only show the function which called your "debug" class.
The `Logger` object has the following properties:
- `level` - where `level` is a log4js level or a string that matches a level (e.g. 'info', 'INFO', etc). This allows overriding the configured level for this logger. Changing this value applies to all loggers of the same category.
- `useCallStack` - where `useCallStack` is a boolean to indicate if log events for this category use the call stack to generate line numbers and file names in the event. This allows overriding the configured useCallStack for this logger. Changing this value applies to all loggers of the same category.
## Shutdown - `log4js.shutdown([callback])`
`shutdown` accepts a callback that will be called when log4js has closed all appenders and finished writing log events. Use this when your programme exits to make sure all your logs are written to files, sockets are closed, etc.
## Custom Layouts - `log4js.addLayout(type, fn)`
This function is used to add user-defined layout functions. See [layouts](layouts.md) for more details and an example.
| 1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./lib/log4js.js | /**
* @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java (but not really).
*
* <h3>Example:</h3>
* <pre>
* const logging = require('log4js');
* const log = logging.getLogger('some-category');
*
* //call the log
* log.trace('trace me' );
* </pre>
*
* NOTE: the authors below are the original browser-based log4js authors
* don't try to contact them about bugs in this version :)
* @author Stephan Strittmatter - http://jroller.com/page/stritti
* @author Seth Chisamore - http://www.chisamore.com
* @since 2005-05-20
* Website: http://log4js.berlios.de
*/
const debug = require('debug')('log4js:main');
const fs = require('fs');
const deepClone = require('rfdc')({ proto: true });
const configuration = require('./configuration');
const layouts = require('./layouts');
const levels = require('./levels');
const appenders = require('./appenders');
const categories = require('./categories');
const Logger = require('./logger');
const clustering = require('./clustering');
const connectLogger = require('./connect-logger');
const recordingModule = require('./appenders/recording');
// Tracks whether log4js is currently configured; writing is a no-op until then.
let enabled = false;

/**
 * Deliver a log event to every appender attached to its category.
 * Does nothing while log4js is disabled (before configure / after shutdown).
 * @param {LoggingEvent} logEvent - the event to dispatch
 */
function sendLogEventToAppender(logEvent) {
  if (!enabled) {
    return;
  }
  debug('Received log event ', logEvent);
  for (const appender of categories.appendersForCategory(
    logEvent.categoryName
  )) {
    appender(logEvent);
  }
}
/**
 * Read and parse a JSON configuration file.
 * @param {string} filename - path of the JSON configuration file
 * @returns {object} the parsed configuration object
 * @throws {Error} if the file cannot be read or does not contain valid JSON
 */
function loadConfigurationFile(filename) {
  debug(`Loading configuration from ${filename}`);
  try {
    return JSON.parse(fs.readFileSync(filename, 'utf8'));
  } catch (e) {
    // Wrap the low-level error so the message names the offending file,
    // keeping the original error attached as the cause for diagnostics.
    throw new Error(
      `Problem reading config from file "${filename}". Error was ${e.message}`,
      { cause: e }
    );
  }
}
/**
 * Configure log4js from a filename (JSON) or a configuration object.
 * If log4js was already configured, the previous appenders are shut down
 * first. Enables log writing and registers the cluster message listener.
 * @param {string|object} configurationFileOrObject - path to a JSON config
 *   file, or the configuration object itself
 * @returns {Log4js} the log4js module, to allow chaining
 */
function configure(configurationFileOrObject) {
  if (enabled) {
    // eslint-disable-next-line no-use-before-define
    shutdown();
  }
  const configObject =
    typeof configurationFileOrObject === 'string'
      ? loadConfigurationFile(configurationFileOrObject)
      : configurationFileOrObject;
  debug(`Configuration is ${configObject}`);
  configuration.configure(deepClone(configObject));
  clustering.onMessage(sendLogEventToAppender);
  enabled = true;
  // eslint-disable-next-line no-use-before-define
  return log4js;
}
/**
 * Accessor for the in-memory "recording" appender module, which captures
 * log events so tests can replay or inspect them.
 * @returns {Recording} the recording appender module
 */
function recording() {
  return recordingModule;
}
/**
 * Shutdown all log appenders. This will first disable all writing to appenders
 * and then call the shutdown function of each appender.
 *
 * @param {Function} cb - The callback to be invoked once all appenders have
 * shutdown. If an error occurs, the callback will be given the error object
 * as the first argument.
 * @returns {null} always null; completion is signalled via the callback
 */
function shutdown(cb) {
  debug('Shutdown called. Disabling all log writing.');
  // First, disable all writing to appenders. This prevents appenders from
  // not being able to be drained because of run-away log writes.
  enabled = false;
  // Clone out to maintain a reference
  const appendersToCheck = Array.from(appenders.values());
  // Reset immediately to prevent leaks
  appenders.init();
  categories.init();
  // Count how many of the appenders expose a shutdown hook.
  const shutdownFunctions = appendersToCheck.reduceRight(
    (accum, next) => (next.shutdown ? accum + 1 : accum),
    0
  );
  if (shutdownFunctions === 0) {
    debug('No appenders with shutdown functions found.');
    if (cb) {
      cb(undefined);
    }
    return null;
  }
  let completed = 0;
  let error;
  debug(`Found ${shutdownFunctions} appenders with shutdown functions.`);
  // Invoked by each appender when it finishes shutting down; remembers the
  // first error reported (if any) and fires the caller's callback once every
  // appender with a shutdown hook has reported back.
  function complete(err) {
    error = error || err;
    completed += 1;
    debug(`Appender shutdowns complete: ${completed} / ${shutdownFunctions}`);
    if (completed >= shutdownFunctions) {
      debug('All shutdown functions completed.');
      if (cb) {
        cb(error);
      }
    }
  }
  appendersToCheck
    .filter((a) => a.shutdown)
    .forEach((a) => a.shutdown(complete));
  return null;
}
/**
 * Get a logger instance for a category. If log4js has not been configured
 * yet, a default configuration is applied first: the file named by the
 * LOG4JS_CONFIG environment variable if set, otherwise a stdout appender
 * with the default category switched OFF.
 * @static
 * @param {string} [category=default] - name of the logger category
 * @return {Logger} instance of logger for the category
 */
function getLogger(category) {
  if (!enabled) {
    const fallbackConfig = {
      appenders: { out: { type: 'stdout' } },
      categories: { default: { appenders: ['out'], level: 'OFF' } },
    };
    configure(process.env.LOG4JS_CONFIG || fallbackConfig);
  }
  return new Logger(category || 'default');
}
/**
 * The public log4js API.
 * @name log4js
 * @namespace Log4js
 * @property getLogger
 * @property configure
 * @property shutdown
 * @property connectLogger
 * @property levels
 * @property addLayout
 * @property recording
 */
const log4js = {
  getLogger,
  configure,
  shutdown,
  connectLogger,
  levels,
  addLayout: layouts.addLayout,
  recording,
};
module.exports = log4js;
| /**
* @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java (but not really).
*
* <h3>Example:</h3>
* <pre>
* const logging = require('log4js');
* const log = logging.getLogger('some-category');
*
* //call the log
* log.trace('trace me' );
* </pre>
*
* NOTE: the authors below are the original browser-based log4js authors
* don't try to contact them about bugs in this version :)
* @author Stephan Strittmatter - http://jroller.com/page/stritti
* @author Seth Chisamore - http://www.chisamore.com
* @since 2005-05-20
* Website: http://log4js.berlios.de
*/
const debug = require('debug')('log4js:main');
const fs = require('fs');
const deepClone = require('rfdc')({ proto: true });
const configuration = require('./configuration');
const layouts = require('./layouts');
const levels = require('./levels');
const appenders = require('./appenders');
const categories = require('./categories');
const Logger = require('./logger');
const clustering = require('./clustering');
const connectLogger = require('./connect-logger');
const recordingModule = require('./appenders/recording');
// Becomes true once configure() has run; shutdown() resets it to false.
let enabled = false;
/**
 * Deliver a log event to every appender configured for its category.
 * A no-op while log4js is disabled.
 * @param {LoggingEvent} logEvent - the event to dispatch
 */
function sendLogEventToAppender(logEvent) {
  if (!enabled) return;
  debug('Received log event ', logEvent);
  const categoryAppenders = categories.appendersForCategory(
    logEvent.categoryName
  );
  categoryAppenders.forEach((appender) => {
    appender(logEvent);
  });
}
/**
 * Read and parse a JSON configuration file.
 * @param {string} filename - path of the JSON configuration file
 * @returns {object} the parsed configuration object
 * @throws {Error} if the file cannot be read or does not contain valid JSON
 */
function loadConfigurationFile(filename) {
  debug(`Loading configuration from ${filename}`);
  try {
    return JSON.parse(fs.readFileSync(filename, 'utf8'));
  } catch (e) {
    // Wrap the low-level error so the message names the offending file,
    // keeping the original error attached as the cause for diagnostics.
    throw new Error(
      `Problem reading config from file "${filename}". Error was ${e.message}`,
      { cause: e }
    );
  }
}
/**
 * Configure log4js from a filename (JSON) or a configuration object.
 * If log4js was already configured, the previous appenders are shut down
 * first. Enables log writing and registers the cluster message listener.
 * @param {string|object} configurationFileOrObject - path to a JSON config
 *   file, or the configuration object itself
 * @returns {Log4js} the log4js module, to allow chaining
 */
function configure(configurationFileOrObject) {
  if (enabled) {
    // eslint-disable-next-line no-use-before-define
    shutdown();
  }
  let configObject = configurationFileOrObject;
  if (typeof configObject === 'string') {
    configObject = loadConfigurationFile(configurationFileOrObject);
  }
  debug(`Configuration is ${configObject}`);
  configuration.configure(deepClone(configObject));
  clustering.onMessage(sendLogEventToAppender);
  enabled = true;
  // eslint-disable-next-line no-use-before-define
  return log4js;
}
/**
 * Accessor for the in-memory "recording" appender module, which captures
 * log events so tests can replay or inspect them.
 * @returns {Recording} the recording appender module
 */
function recording() {
  return recordingModule;
}
/**
 * This callback type is called `shutdownCallback` and is displayed as a global symbol.
 *
 * @callback shutdownCallback
 * @param {Error} [error]
 */
/**
 * Shutdown all log appenders. This will first disable all writing to appenders
 * and then call the shutdown function of each appender.
 *
 * @param {shutdownCallback} [callback] - The callback to be invoked once all appenders have
 * shutdown. If an error occurs, the callback will be given the error object
 * as the first argument.
 */
function shutdown(callback) {
  debug('Shutdown called. Disabling all log writing.');
  // First, disable all writing to appenders. This prevents appenders from
  // not being able to be drained because of run-away log writes.
  enabled = false;
  // Clone out to maintain a reference
  const appendersToCheck = Array.from(appenders.values());
  // Reset immediately to prevent leaks
  appenders.init();
  categories.init();
  // Count the number of shutdown functions
  const shutdownFunctions = appendersToCheck.reduceRight(
    (accum, next) => (next.shutdown ? accum + 1 : accum),
    0
  );
  if (shutdownFunctions === 0) {
    debug('No appenders with shutdown functions found.');
    if (callback) {
      callback();
    }
    // Nothing to wait for: return now so the code below does not emit a
    // misleading "Found 0 appenders" debug line or set up unused state.
    return;
  }
  let completed = 0;
  let error;
  debug(`Found ${shutdownFunctions} appenders with shutdown functions.`);
  // Invoked by each appender when it finishes shutting down; remembers the
  // first error reported (if any) and fires the caller's callback once every
  // appender with a shutdown hook has reported back.
  function complete(err) {
    error = error || err;
    completed += 1;
    debug(`Appender shutdowns complete: ${completed} / ${shutdownFunctions}`);
    if (completed >= shutdownFunctions) {
      debug('All shutdown functions completed.');
      if (callback) {
        callback(error);
      }
    }
  }
  // Call each of the shutdown functions
  appendersToCheck
    .filter((a) => a.shutdown)
    .forEach((a) => a.shutdown(complete));
}
/**
 * Get a logger instance. If log4js has not been configured yet, a default
 * configuration is applied first: the file named by the LOG4JS_CONFIG
 * environment variable if set, otherwise a stdout appender with the default
 * category switched OFF.
 * @static
 * @param {string} [category=default]
 * @return {Logger} instance of logger for the category
 */
function getLogger(category) {
  if (!enabled) {
    // Lazy default configuration: honour LOG4JS_CONFIG if set, otherwise
    // log to stdout with the default category OFF until configured.
    configure(
      process.env.LOG4JS_CONFIG || {
        appenders: { out: { type: 'stdout' } },
        categories: { default: { appenders: ['out'], level: 'OFF' } },
      }
    );
  }
  return new Logger(category || 'default');
}
/**
 * The public log4js API.
 * @name log4js
 * @namespace Log4js
 * @property getLogger
 * @property configure
 * @property shutdown
 * @property connectLogger
 * @property levels
 * @property addLayout
 * @property recording
 */
const log4js = {
  getLogger,
  configure,
  shutdown,
  connectLogger,
  levels,
  addLayout: layouts.addLayout,
  recording,
};
module.exports = log4js;
| 1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./types/log4js.d.ts | // Type definitions for log4js
type Format =
| string
| ((req: any, res: any, formatter: (str: string) => string) => string);
/**
 * Shape of the log4js module / the object returned by `configure()`.
 */
export interface Log4js {
  /** Returns a logger for the given category (defaults to "default"). */
  getLogger(category?: string): Logger;
  /** Configure from the path of a JSON configuration file. */
  configure(filename: string): Log4js;
  /** Configure from a configuration object. */
  configure(config: Configuration): Log4js;
  /** Register a user-defined layout generator under the given name. */
  addLayout(
    name: string,
    config: (a: any) => (logEvent: LoggingEvent) => string
  ): void;
  /** Express/Connect middleware factory for request logging. */
  connectLogger(
    logger: Logger,
    options: { format?: Format; level?: string; nolog?: any }
  ): any; // express.Handler;
  /** The built-in (and any custom) log levels. */
  levels: Levels;
  /** Close all appenders; cb receives an error if shutdown failed. */
  shutdown(cb?: (error: Error | undefined) => void): null;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(
name: string,
config: (a: any) => (logEvent: LoggingEvent) => any
): void;
export function connectLogger(
logger: Logger,
options: {
format?: Format;
level?: string;
nolog?: any;
statusRules?: any[];
context?: boolean;
}
): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error: Error | undefined) => void): null;
export interface BasicLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
/** A single log event, as passed to appenders and layouts. */
export interface LoggingEvent {
  categoryName: string; // name of category
  level: Level; // level of message
  data: any[]; // objects to log
  startTime: Date; // when the event was created
  pid: number; // id of the process that produced the event
  context: any; // context values attached to the logger
  cluster?: {
    // identifiers of the cluster worker that emitted the event, if any
    workerId: number;
    worker: number;
  };
  // call-site details; only populated when call-stack capture is enabled
  functionName?: string;
  fileName?: string;
  lineNumber?: number;
  columnNumber?: number;
  callStack?: string;
  /** Serialise the event to a string (used for inter-process transport). */
  serialise(): string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout =
| BasicLayout
| ColoredLayout
| MessagePassThroughLayout
| DummyLayout
| PatternLayout
| CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: 'categoryFilter';
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: 'noLogFilter';
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
fileNameSep?: string;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
fileNameSep?: string;
// (defaults to false) include the pattern in the name of the current log file.
alwaysIncludePattern?: boolean;
// (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master) the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to ColoredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// (defaults to 5000)
port?: number;
// (defaults to localhost)
host?: string;
// (defaults to __LOG4JS__)
endMsg?: string;
// (defaults to a serialized log event)
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
/**
* Mapping of all Appenders to allow for declaration merging
* @example
* declare module 'log4js' {
* interface Appenders {
* StorageTestAppender: {
* type: 'storageTest';
* storageMedium: 'dvd' | 'usb' | 'hdd';
* };
* }
* }
*/
export interface Appenders {
CategoryFilterAppender: CategoryFilterAppender;
ConsoleAppender: ConsoleAppender;
FileAppender: FileAppender;
SyncfileAppender: SyncfileAppender;
DateFileAppender: DateFileAppender;
LogLevelFilterAppender: LogLevelFilterAppender;
NoLogFilterAppender: NoLogFilterAppender;
MultiFileAppender: MultiFileAppender;
MultiprocessAppender: MultiprocessAppender;
RecordingAppender: RecordingAppender;
StandardErrorAppender: StandardErrorAppender;
StandardOutputAppender: StandardOutputAppender;
TCPAppender: TCPAppender;
CustomAppender: CustomAppender;
}
export interface AppenderModule {
configure: (
config?: Config,
layouts?: LayoutsParam,
findAppender?: () => AppenderFunction,
levels?: Levels
) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any;
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = Appenders[keyof Appenders];
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender };
categories: {
[name: string]: {
appenders: string[];
level: string;
enableCallStack?: boolean;
};
};
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(): AppenderFunction;
replay(): LoggingEvent[];
playback(): LoggingEvent[];
reset(): void;
erase(): void;
}
export interface Logger {
new (name: string): Logger;
readonly category: string;
level: Level | string;
log(level: Level | string, ...args: any[]): void;
isLevelEnabled(level?: string): boolean;
isTraceEnabled(): boolean;
isDebugEnabled(): boolean;
isInfoEnabled(): boolean;
isWarnEnabled(): boolean;
isErrorEnabled(): boolean;
isFatalEnabled(): boolean;
_log(level: Level, data: any): void;
addContext(key: string, value: any): void;
removeContext(key: string): void;
clearContext(): void;
setParseCallStackFunction(parseFunction: Function): void;
trace(message: any, ...args: any[]): void;
debug(message: any, ...args: any[]): void;
info(message: any, ...args: any[]): void;
warn(message: any, ...args: any[]): void;
error(message: any, ...args: any[]): void;
fatal(message: any, ...args: any[]): void;
mark(message: any, ...args: any[]): void;
}
| // Type definitions for log4js
type Format =
| string
| ((req: any, res: any, formatter: (str: string) => string) => string);
export interface Log4js {
getLogger(category?: string): Logger;
configure(filename: string): Log4js;
configure(config: Configuration): Log4js;
addLayout(
name: string,
config: (a: any) => (logEvent: LoggingEvent) => string
): void;
connectLogger(
logger: Logger,
options: { format?: Format; level?: string; nolog?: any }
): any; // express.Handler;
levels: Levels;
shutdown(cb?: (error?: Error) => void): void;
}
export function getLogger(category?: string): Logger;
export function configure(filename: string): Log4js;
export function configure(config: Configuration): Log4js;
export function addLayout(
name: string,
config: (a: any) => (logEvent: LoggingEvent) => any
): void;
export function connectLogger(
logger: Logger,
options: {
format?: Format;
level?: string;
nolog?: any;
statusRules?: any[];
context?: boolean;
}
): any; // express.Handler;
export function recording(): Recording;
export const levels: Levels;
export function shutdown(cb?: (error?: Error) => void): void;
export interface BasicLayout {
type: 'basic';
}
export interface ColoredLayout {
type: 'colored' | 'coloured';
}
export interface MessagePassThroughLayout {
type: 'messagePassThrough';
}
export interface DummyLayout {
type: 'dummy';
}
export interface Level {
isEqualTo(other: string): boolean;
isEqualTo(otherLevel: Level): boolean;
isLessThanOrEqualTo(other: string): boolean;
isLessThanOrEqualTo(otherLevel: Level): boolean;
isGreaterThanOrEqualTo(other: string): boolean;
isGreaterThanOrEqualTo(otherLevel: Level): boolean;
colour: string;
level: number;
levelStr: string;
}
export interface LoggingEvent {
categoryName: string; // name of category
level: Level; // level of message
data: any[]; // objects to log
startTime: Date;
pid: number;
context: any;
cluster?: {
workerId: number;
worker: number;
};
functionName?: string;
fileName?: string;
lineNumber?: number;
columnNumber?: number;
callStack?: string;
serialise(): string;
}
export type Token = ((logEvent: LoggingEvent) => string) | string;
export interface PatternLayout {
type: 'pattern';
// specifier for the output format, using placeholders as described below
pattern: string;
// user-defined tokens to be used in the pattern
tokens?: { [name: string]: Token };
}
export interface CustomLayout {
[key: string]: any;
type: string;
}
export type Layout =
| BasicLayout
| ColoredLayout
| MessagePassThroughLayout
| DummyLayout
| PatternLayout
| CustomLayout;
/**
* Category Filter
*
* @see https://log4js-node.github.io/log4js-node/categoryFilter.html
*/
export interface CategoryFilterAppender {
type: 'categoryFilter';
// the category (or categories if you provide an array of values) that will be excluded from the appender.
exclude?: string | string[];
// the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
appender?: string;
}
/**
* No Log Filter
*
* @see https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
export interface NoLogFilterAppender {
type: 'noLogFilter';
// the regular expression (or the regular expressions if you provide an array of values)
// will be used for evaluating the events to pass to the appender.
// The events, which will match the regular expression, will be excluded and so not logged.
exclude: string | string[];
// the name of an appender, defined in the same configuration, that you want to filter.
appender: string;
}
/**
* Console Appender
*
* @see https://log4js-node.github.io/log4js-node/console.html
*/
export interface ConsoleAppender {
type: 'console';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface FileAppender {
type: 'file';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
fileNameSep?: string;
}
export interface SyncfileAppender {
type: 'fileSync';
// the path of the file where you want your logs written.
filename: string;
// (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
maxLogSize?: number | string;
// (defaults to 5) the number of old log files to keep (excluding the hot file).
backups?: number;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
}
export interface DateFileAppender {
type: 'dateFile';
// the path of the file where you want your logs written.
filename: string;
// (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
/**
* The following strings are recognised in the pattern:
* - yyyy : the full year, use yy for just the last two digits
* - MM : the month
* - dd : the day of the month
* - hh : the hour of the day (24-hour clock)
* - mm : the minute of the hour
* - ss : seconds
* - SSS : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
* - O : timezone (capital letter o)
*/
pattern?: string;
// (defaults to BasicLayout)
layout?: Layout;
// (defaults to utf-8)
encoding?: string;
// (defaults to 0o600)
mode?: number;
// (defaults to a)
flags?: string;
// (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
compress?: boolean;
// (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
keepFileExt?: boolean;
// (defaults to .) the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
fileNameSep?: string;
// (defaults to false) include the pattern in the name of the current log file.
alwaysIncludePattern?: boolean;
// (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
numBackups?: number;
}
export interface LogLevelFilterAppender {
type: 'logLevelFilter';
// the name of an appender, defined in the same configuration, that you want to filter
appender: string;
// the minimum level of event to allow through the filter
level: string;
// (defaults to FATAL) the maximum level of event to allow through the filter
maxLevel?: string;
}
export interface MultiFileAppender {
type: 'multiFile';
// the base part of the generated log filename
base: string;
// the value to use to split files (see below).
property: string;
// the suffix for the generated log filename.
extension: string;
}
export interface MultiprocessAppender {
type: 'multiprocess';
// controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
mode: 'master' | 'worker';
// (only needed if mode == master) the name of the appender to send the log events to
appender?: string;
// (defaults to 5000) the port to listen on, or send to
loggerPort?: number;
// (defaults to localhost) the host/IP address to listen on, or send to
loggerHost?: string;
}
export interface RecordingAppender {
type: 'recording';
}
export interface StandardErrorAppender {
type: 'stderr';
// (defaults to ColoredLayout)
layout?: Layout;
}
export interface StandardOutputAppender {
type: 'stdout';
// (defaults to ColoredLayout)
layout?: Layout;
}
/**
* TCP Appender
*
* @see https://log4js-node.github.io/log4js-node/tcp.html
*/
export interface TCPAppender {
type: 'tcp';
// (defaults to 5000)
port?: number;
// (defaults to localhost)
host?: string;
// (defaults to __LOG4JS__)
endMsg?: string;
// (defaults to a serialized log event)
layout?: Layout;
}
export interface CustomAppender {
type: string | AppenderModule;
[key: string]: any;
}
/**
* Mapping of all Appenders to allow for declaration merging
* @example
* declare module 'log4js' {
* interface Appenders {
* StorageTestAppender: {
* type: 'storageTest';
* storageMedium: 'dvd' | 'usb' | 'hdd';
* };
* }
* }
*/
export interface Appenders {
CategoryFilterAppender: CategoryFilterAppender;
ConsoleAppender: ConsoleAppender;
FileAppender: FileAppender;
SyncfileAppender: SyncfileAppender;
DateFileAppender: DateFileAppender;
LogLevelFilterAppender: LogLevelFilterAppender;
NoLogFilterAppender: NoLogFilterAppender;
MultiFileAppender: MultiFileAppender;
MultiprocessAppender: MultiprocessAppender;
RecordingAppender: RecordingAppender;
StandardErrorAppender: StandardErrorAppender;
StandardOutputAppender: StandardOutputAppender;
TCPAppender: TCPAppender;
CustomAppender: CustomAppender;
}
export interface AppenderModule {
configure: (
config?: Config,
layouts?: LayoutsParam,
findAppender?: () => AppenderFunction,
levels?: Levels
) => AppenderFunction;
}
export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
// TODO: Actually add types here...
// It's supposed to be the full config element
export type Config = any;
export interface LayoutsParam {
basicLayout: LayoutFunction;
messagePassThroughLayout: LayoutFunction;
patternLayout: LayoutFunction;
colouredLayout: LayoutFunction;
coloredLayout: LayoutFunction;
dummyLayout: LayoutFunction;
addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
layout: (name: string, config: PatternToken) => LayoutFunction;
}
export interface PatternToken {
pattern: string; // TODO type this to enforce good pattern...
tokens: { [tokenName: string]: () => any };
}
export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
export type Appender = Appenders[keyof Appenders];
export interface Levels {
ALL: Level;
MARK: Level;
TRACE: Level;
DEBUG: Level;
INFO: Level;
WARN: Level;
ERROR: Level;
FATAL: Level;
OFF: Level;
levels: Level[];
getLevel(level: Level | string, defaultLevel?: Level): Level;
addLevels(customLevels: object): void;
}
export interface Configuration {
appenders: { [name: string]: Appender };
categories: {
[name: string]: {
appenders: string[];
level: string;
enableCallStack?: boolean;
};
};
pm2?: boolean;
pm2InstanceVar?: string;
levels?: Levels;
disableClustering?: boolean;
}
export interface Recording {
configure(): AppenderFunction;
replay(): LoggingEvent[];
playback(): LoggingEvent[];
reset(): void;
erase(): void;
}
/**
 * A logger bound to a single category. Obtain instances via `getLogger()`.
 */
export interface Logger {
  new (name: string): Logger;
  /** The category this logger was created for. */
  readonly category: string;
  /** The logger's current level (read/write). */
  level: Level | string;
  /** Log at an explicit level. */
  log(level: Level | string, ...args: any[]): void;
  // Level guards: true when events at the given level would be written.
  isLevelEnabled(level?: string): boolean;
  isTraceEnabled(): boolean;
  isDebugEnabled(): boolean;
  isInfoEnabled(): boolean;
  isWarnEnabled(): boolean;
  isErrorEnabled(): boolean;
  isFatalEnabled(): boolean;
  _log(level: Level, data: any): void;
  // Per-logger context values included in emitted events.
  addContext(key: string, value: any): void;
  removeContext(key: string): void;
  clearContext(): void;
  setParseCallStackFunction(parseFunction: Function): void;
  // Convenience methods, one per built-in level.
  trace(message: any, ...args: any[]): void;
  debug(message: any, ...args: any[]): void;
  info(message: any, ...args: any[]): void;
  warn(message: any, ...args: any[]): void;
  error(message: any, ...args: any[]): void;
  fatal(message: any, ...args: any[]): void;
  mark(message: any, ...args: any[]): void;
}
| 1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/logLevelFilter-test.js | const { test } = require('tap');
const fs = require('fs');
const os = require('os');
const EOL = os.EOL || '\n';
const osDelay = process.platform === 'win32' ? 400 : 200;
/**
 * Best-effort synchronous deletion of a file.
 * Any failure (typically the file not existing yet) is deliberately ignored.
 * @param {string} filename - path of the file to delete
 */
function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (ignored) {
    // best-effort cleanup - a missing file is not a problem
  }
}
test('log4js logLevelFilter', (batch) => {
  batch.test('appender', (t) => {
    const log4js = require('../../lib/log4js');
    const recording = require('../../lib/appenders/recording');
    // Route everything through a logLevelFilter set to ERROR, backed by the
    // in-memory recording appender so the filtered events can be inspected.
    log4js.configure({
      appenders: {
        recorder: { type: 'recording' },
        filtered: {
          type: 'logLevelFilter',
          appender: 'recorder',
          level: 'ERROR',
        },
      },
      categories: {
        default: { appenders: ['filtered'], level: 'debug' },
      },
    });
    const logger = log4js.getLogger('logLevelTest');
    logger.debug('this should not trigger an event');
    logger.warn('neither should this');
    logger.error('this should, though');
    logger.fatal('so should this');

    const logEvents = recording.replay();
    t.test(
      'should only pass log events greater than or equal to its own level',
      (assert) => {
        // Only the ERROR and FATAL events should have reached the recorder.
        assert.equal(logEvents.length, 2);
        assert.equal(logEvents[0].data[0], 'this should, though');
        assert.equal(logEvents[1].data[0], 'so should this');
        assert.end();
      }
    );
    t.end();
  });

  batch.test('configure', (t) => {
    const log4js = require('../../lib/log4js');

    // Start from a clean slate, and clean up after the test.
    remove(`${__dirname}/logLevelFilter.log`);
    remove(`${__dirname}/logLevelFilter-warnings.log`);
    remove(`${__dirname}/logLevelFilter-debugs.log`);

    t.teardown(() => {
      remove(`${__dirname}/logLevelFilter.log`);
      remove(`${__dirname}/logLevelFilter-warnings.log`);
      remove(`${__dirname}/logLevelFilter-debugs.log`);
    });

    // Three file appenders: everything, WARN-and-above, and TRACE..DEBUG only.
    log4js.configure({
      appenders: {
        'warning-file': {
          type: 'file',
          filename: 'test/tap/logLevelFilter-warnings.log',
          layout: { type: 'messagePassThrough' },
        },
        warnings: {
          type: 'logLevelFilter',
          level: 'WARN',
          appender: 'warning-file',
        },
        'debug-file': {
          type: 'file',
          filename: 'test/tap/logLevelFilter-debugs.log',
          layout: { type: 'messagePassThrough' },
        },
        debugs: {
          type: 'logLevelFilter',
          level: 'TRACE',
          maxLevel: 'DEBUG',
          appender: 'debug-file',
        },
        tests: {
          type: 'file',
          filename: 'test/tap/logLevelFilter.log',
          layout: {
            type: 'messagePassThrough',
          },
        },
      },
      categories: {
        default: { appenders: ['tests', 'warnings', 'debugs'], level: 'trace' },
      },
    });
    const logger = log4js.getLogger('tests');
    logger.debug('debug');
    logger.info('info');
    logger.error('error');
    logger.warn('warn');
    logger.debug('debug');
    logger.trace('trace');

    // Reads a log file and hands its trimmed lines to `verify`. Fails the
    // test cleanly if the read errors, instead of crashing with a TypeError
    // on `contents.trim()` when `contents` is undefined.
    const readLogLines = (assert, file, verify) => {
      fs.readFile(file, 'utf8', (err, contents) => {
        if (err) {
          assert.fail(`could not read ${file}: ${err.message}`);
          assert.end();
          return;
        }
        verify(contents.trim().split(EOL));
      });
    };

    // wait for the file system to catch up
    setTimeout(() => {
      t.test('tmp-tests.log should contain all log messages', (assert) => {
        readLogLines(assert, `${__dirname}/logLevelFilter.log`, (messages) => {
          assert.same(messages, [
            'debug',
            'info',
            'error',
            'warn',
            'debug',
            'trace',
          ]);
          assert.end();
        });
      });
      t.test(
        'tmp-tests-warnings.log should contain only error and warning logs',
        (assert) => {
          readLogLines(
            assert,
            `${__dirname}/logLevelFilter-warnings.log`,
            (messages) => {
              assert.same(messages, ['error', 'warn']);
              assert.end();
            }
          );
        }
      );
      t.test(
        'tmp-tests-debugs.log should contain only trace and debug logs',
        (assert) => {
          readLogLines(
            assert,
            `${__dirname}/logLevelFilter-debugs.log`,
            (messages) => {
              assert.same(messages, ['debug', 'debug', 'trace']);
              assert.end();
            }
          );
        }
      );
      t.end();
    }, osDelay);
  });
  batch.end();
});
| const { test } = require('tap');
const fs = require('fs');
const os = require('os');
const EOL = os.EOL || '\n';
const osDelay = process.platform === 'win32' ? 400 : 200;
/**
 * Deletes `filename` synchronously, swallowing any error — used for
 * best-effort cleanup where the file may not exist yet.
 * @param {string} filename - path of the file to delete
 */
function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (ignored) {
    // failure here (usually ENOENT) is fine
  }
}
test('log4js logLevelFilter', (batch) => {
  batch.test('appender', (t) => {
    const log4js = require('../../lib/log4js');
    const recording = require('../../lib/appenders/recording');
    // Route everything through a logLevelFilter set to ERROR, backed by the
    // in-memory recording appender so the filtered events can be inspected.
    log4js.configure({
      appenders: {
        recorder: { type: 'recording' },
        filtered: {
          type: 'logLevelFilter',
          appender: 'recorder',
          level: 'ERROR',
        },
      },
      categories: {
        default: { appenders: ['filtered'], level: 'debug' },
      },
    });
    const logger = log4js.getLogger('logLevelTest');
    logger.debug('this should not trigger an event');
    logger.warn('neither should this');
    logger.error('this should, though');
    logger.fatal('so should this');

    const logEvents = recording.replay();
    t.test(
      'should only pass log events greater than or equal to its own level',
      (assert) => {
        // Only the ERROR and FATAL events should have reached the recorder.
        assert.equal(logEvents.length, 2);
        assert.equal(logEvents[0].data[0], 'this should, though');
        assert.equal(logEvents[1].data[0], 'so should this');
        assert.end();
      }
    );
    t.end();
  });

  batch.test('configure', (t) => {
    const log4js = require('../../lib/log4js');

    // Start from a clean slate, and clean up after the test.
    remove(`${__dirname}/logLevelFilter.log`);
    remove(`${__dirname}/logLevelFilter-warnings.log`);
    remove(`${__dirname}/logLevelFilter-debugs.log`);

    t.teardown(() => {
      remove(`${__dirname}/logLevelFilter.log`);
      remove(`${__dirname}/logLevelFilter-warnings.log`);
      remove(`${__dirname}/logLevelFilter-debugs.log`);
    });

    // Three file appenders: everything, WARN-and-above, and TRACE..DEBUG only.
    log4js.configure({
      appenders: {
        'warning-file': {
          type: 'file',
          filename: 'test/tap/logLevelFilter-warnings.log',
          layout: { type: 'messagePassThrough' },
        },
        warnings: {
          type: 'logLevelFilter',
          level: 'WARN',
          appender: 'warning-file',
        },
        'debug-file': {
          type: 'file',
          filename: 'test/tap/logLevelFilter-debugs.log',
          layout: { type: 'messagePassThrough' },
        },
        debugs: {
          type: 'logLevelFilter',
          level: 'TRACE',
          maxLevel: 'DEBUG',
          appender: 'debug-file',
        },
        tests: {
          type: 'file',
          filename: 'test/tap/logLevelFilter.log',
          layout: {
            type: 'messagePassThrough',
          },
        },
      },
      categories: {
        default: { appenders: ['tests', 'warnings', 'debugs'], level: 'trace' },
      },
    });
    const logger = log4js.getLogger('tests');
    logger.debug('debug');
    logger.info('info');
    logger.error('error');
    logger.warn('warn');
    logger.debug('debug');
    logger.trace('trace');

    // Reads a log file and hands its trimmed lines to `verify`. Fails the
    // test cleanly if the read errors, instead of crashing with a TypeError
    // on `contents.trim()` when `contents` is undefined.
    const readLogLines = (assert, file, verify) => {
      fs.readFile(file, 'utf8', (err, contents) => {
        if (err) {
          assert.fail(`could not read ${file}: ${err.message}`);
          assert.end();
          return;
        }
        verify(contents.trim().split(EOL));
      });
    };

    // wait for the file system to catch up
    setTimeout(() => {
      t.test('tmp-tests.log should contain all log messages', (assert) => {
        readLogLines(assert, `${__dirname}/logLevelFilter.log`, (messages) => {
          assert.same(messages, [
            'debug',
            'info',
            'error',
            'warn',
            'debug',
            'trace',
          ]);
          assert.end();
        });
      });
      t.test(
        'tmp-tests-warnings.log should contain only error and warning logs',
        (assert) => {
          readLogLines(
            assert,
            `${__dirname}/logLevelFilter-warnings.log`,
            (messages) => {
              assert.same(messages, ['error', 'warn']);
              assert.end();
            }
          );
        }
      );
      t.test(
        'tmp-tests-debugs.log should contain only trace and debug logs',
        (assert) => {
          readLogLines(
            assert,
            `${__dirname}/logLevelFilter-debugs.log`,
            (messages) => {
              assert.same(messages, ['debug', 'debug', 'trace']);
              assert.end();
            }
          );
        }
      );
      t.end();
    }, osDelay);
  });
  batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/dateFile.md | # Date Rolling File Appender
This is a file appender that rolls log files based on a configurable time, rather than the file size. When using the date file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the date file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"dateFile"`
- `filename` - `string` - the path of the file where you want your logs written.
- `pattern` - `string` (optional, defaults to `yyyy-MM-dd`) - the pattern to use to determine when to roll the logs.
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
- `alwaysIncludePattern` - `boolean` (default false) - include the pattern in the name of the current log file.
- `numBackups` - `integer` (default 1) - the number of old files that match the pattern to keep (excluding the hot file).
The `pattern` is used to determine when the current log file should be renamed and a new log file created. For example, with a filename of 'cheese.log', and the default pattern of `.yyyy-MM-dd` - on startup this will result in a file called `cheese.log` being created and written to until the next write after midnight. When this happens, `cheese.log` will be renamed to `cheese.log.2017-04-30` and a new `cheese.log` file created. The appender uses the [date-format](https://github.com/nomiddlename/date-format) library to parse the `pattern`, and any of the valid formats can be used. Also note that there is no timer controlling the log rolling - changes in the pattern are determined on every log write. If no writes occur, then no log rolling will happen. If your application logs infrequently this could result in no log file being written for a particular time period.
Note that, from version 4.x of log4js onwards, the date file appender can take any of the options for the [file appender](file.md) as well. So you could roll files by both date and size.
## Example (default daily log rolling)
```javascript
log4js.configure({
appenders: {
everything: { type: "dateFile", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This example will result in files being rolled every day. The initial file will be `all-the-logs.log`, with the daily backups being `all-the-logs.log.2017-04-30`, etc.
## Example with hourly log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "dateFile",
filename: "all-the-logs.log",
pattern: "yyyy-MM-dd-hh",
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). Every hour this file will be compressed and renamed to `all-the-logs.log.2017-04-30-08.gz` (for example) and a new `all-the-logs.log` created.
## Memory usage
If your application logs a large volume of messages, and you find memory usage increasing due to log messages being buffered before they are written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "dateFile", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| # Date Rolling File Appender
This is a file appender that rolls log files based on a configurable time, rather than the file size. When using the date file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the date file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"dateFile"`
- `filename` - `string` - the path of the file where you want your logs written.
- `pattern` - `string` (optional, defaults to `yyyy-MM-dd`) - the pattern to use to determine when to roll the logs.
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
- `alwaysIncludePattern` - `boolean` (default false) - include the pattern in the name of the current log file.
- `numBackups` - `integer` (default 1) - the number of old files that match the pattern to keep (excluding the hot file).
The `pattern` is used to determine when the current log file should be renamed and a new log file created. For example, with a filename of 'cheese.log', and the default pattern of `.yyyy-MM-dd` - on startup this will result in a file called `cheese.log` being created and written to until the next write after midnight. When this happens, `cheese.log` will be renamed to `cheese.log.2017-04-30` and a new `cheese.log` file created. The appender uses the [date-format](https://github.com/nomiddlename/date-format) library to parse the `pattern`, and any of the valid formats can be used. Also note that there is no timer controlling the log rolling - changes in the pattern are determined on every log write. If no writes occur, then no log rolling will happen. If your application logs infrequently this could result in no log file being written for a particular time period.
Note that, from version 4.x of log4js onwards, the date file appender can take any of the options for the [file appender](file.md) as well. So you could roll files by both date and size.
## Example (default daily log rolling)
```javascript
log4js.configure({
appenders: {
everything: { type: "dateFile", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This example will result in files being rolled every day. The initial file will be `all-the-logs.log`, with the daily backups being `all-the-logs.log.2017-04-30`, etc.
## Example with hourly log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "dateFile",
filename: "all-the-logs.log",
pattern: "yyyy-MM-dd-hh",
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). Every hour this file will be compressed and renamed to `all-the-logs.log.2017-04-30-08.gz` (for example) and a new `all-the-logs.log` created.
## Memory usage
If your application logs a large volume of messages, and you find memory usage increasing due to log messages being buffered before they are written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "dateFile", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/passenger-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');

// Phusion Passenger provides a stub, non-functional `cluster` module which,
// unlike the real one, does not implement the EventEmitter methods. This
// fake reproduces that shape so the test can simulate running under
// Passenger without actually doing so.
const passengerCluster = {
  disconnect() {
    return false;
  },
  fork() {
    return false;
  },
  setupMaster() {
    return false;
  },
  isWorker: true,
  isMaster: false,
  schedulingPolicy: false,
  settings: false,
  worker: false,
  workers: false,
};

const vcr = require('../../lib/appenders/recording');

// Load log4js with the stubbed cluster module injected, so it behaves as it
// would inside a Passenger-managed process.
const log4js = sandbox.require('../../lib/log4js', {
  requires: {
    cluster: passengerCluster,
    './appenders/recording': vcr,
  },
});

test('When running in Passenger', (batch) => {
  batch.test('it should still log', (t) => {
    // With `disableClustering: true`, logging must work even though the
    // injected cluster module is present but non-functional.
    log4js.configure({
      appenders: {
        vcr: { type: 'recording' },
      },
      categories: {
        default: { appenders: ['vcr'], level: 'info' },
      },
      disableClustering: true,
    });
    log4js.getLogger().info('This should still work');

    // The single INFO event should have reached the recording appender.
    const events = vcr.replay();
    t.equal(events.length, 1);
    t.equal(events[0].data[0], 'This should still work');
    t.end();
  });
  batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');

// Phusion Passenger provides a stub, non-functional `cluster` module which,
// unlike the real one, does not implement the EventEmitter methods. This
// fake reproduces that shape so the test can simulate running under
// Passenger without actually doing so.
const passengerCluster = {
  disconnect() {
    return false;
  },
  fork() {
    return false;
  },
  setupMaster() {
    return false;
  },
  isWorker: true,
  isMaster: false,
  schedulingPolicy: false,
  settings: false,
  worker: false,
  workers: false,
};

const vcr = require('../../lib/appenders/recording');

// Load log4js with the stubbed cluster module injected, so it behaves as it
// would inside a Passenger-managed process.
const log4js = sandbox.require('../../lib/log4js', {
  requires: {
    cluster: passengerCluster,
    './appenders/recording': vcr,
  },
});

test('When running in Passenger', (batch) => {
  batch.test('it should still log', (t) => {
    // With `disableClustering: true`, logging must work even though the
    // injected cluster module is present but non-functional.
    log4js.configure({
      appenders: {
        vcr: { type: 'recording' },
      },
      categories: {
        default: { appenders: ['vcr'], level: 'info' },
      },
      disableClustering: true,
    });
    log4js.getLogger().info('This should still work');

    // The single INFO event should have reached the recording appender.
    const events = vcr.replay();
    t.equal(events.length, 1);
    t.equal(events[0].data[0], 'This should still work');
    t.end();
  });
  batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/contrib-guidelines.md | # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/fileAppender-test.js | /* eslint max-classes-per-file: ["error", 2] */
const { test } = require('tap');
const fs = require('fs-extra');
const path = require('path');
const sandbox = require('@log4js-node/sandboxed-module');
const zlib = require('zlib');
const util = require('util');
const osDelay = process.platform === 'win32' ? 400 : 200;
const sleep = util.promisify(setTimeout);
const gunzip = util.promisify(zlib.gunzip);
const EOL = require('os').EOL || '\n';
const log4js = require('../../lib/log4js');
/**
 * Best-effort asynchronous deletion of a file: unlinks `filename` and
 * swallows any error (typically the file simply does not exist yet).
 * @param {string} filename - path of the file to delete
 * @returns {Promise<void>} resolves whether or not the file was deleted
 */
const removeFile = async (filename) => {
  try {
    await fs.unlink(filename);
  } catch (ignored) {
    // best-effort cleanup - failure here is not a problem
  }
};
test('log4js fileAppender', (batch) => {
batch.test('with default fileAppender settings', async (t) => {
const testFile = path.join(__dirname, 'fa-default-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
log4js.configure({
appenders: { file: { type: 'file', filename: testFile } },
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This should be in the file.');
await sleep(osDelay);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test('should give error if invalid filename', async (t) => {
const file = '';
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
filename: file,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Invalid filename: ${file}`)
);
const dir = `.${path.sep}`;
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
filename: dir,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Filename is a directory: ${dir}`)
);
t.end();
});
batch.test('should flush logs on shutdown', async (t) => {
const testFile = path.join(__dirname, 'fa-default-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
log4js.configure({
appenders: { test: { type: 'file', filename: testFile } },
categories: { default: { appenders: ['test'], level: 'trace' } },
});
logger.info('1');
logger.info('2');
logger.info('3');
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
const fileContents = await fs.readFile(testFile, 'utf8');
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test('with a max file size and no backups', async (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-test.log');
const logger = log4js.getLogger('max-file-size');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 100,
backups: 0,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
logger.info('This is the first log message.');
logger.info('This is an intermediate log message.');
logger.info('This is the second log message.');
// wait for the file system to catch up
await sleep(osDelay * 2);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, 'This is the second log message.');
t.equal(fileContents.indexOf('This is the first log message.'), -1);
const files = await fs.readdir(__dirname);
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-test.log')
);
t.equal(logFiles.length, 1, 'should be 1 file');
t.end();
});
batch.test('with a max file size in wrong unit mode', async (t) => {
const invalidUnit = '1Z';
const expectedError = new Error(`maxLogSize: "${invalidUnit}" is invalid`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
maxLogSize: invalidUnit,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
expectedError
);
t.end();
});
batch.test('with a max file size in unit mode and no backups', async (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-unit-test.log');
const logger = log4js.getLogger('max-file-size-unit');
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
});
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: '1K',
backups: 0,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
}
logger.info('This is the second log message.');
// wait for the file system to catch up
await sleep(osDelay);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, 'This is the second log message.');
t.notMatch(fileContents, 'These are the log messages for the first file.');
const files = await fs.readdir(__dirname);
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-unit-test.log')
);
t.equal(logFiles.length, 1, 'should be 1 file');
t.end();
});
batch.test('with a max file size and 2 backups', async (t) => {
const testFile = path.join(
__dirname,
'fa-maxFileSize-with-backups-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`),
]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`),
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 50,
backups: 2,
},
},
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
// give the system a chance to open the stream
await sleep(osDelay);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter((file) => file.includes('fa-maxFileSize-with-backups-test.log'));
t.equal(logFiles.length, 3);
t.same(logFiles, [
'fa-maxFileSize-with-backups-test.log',
'fa-maxFileSize-with-backups-test.log.1',
'fa-maxFileSize-with-backups-test.log.2',
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
t.match(contents, 'This is the fourth log message.');
contents = await fs.readFile(path.join(__dirname, logFiles[1]), 'utf8');
t.match(contents, 'This is the third log message.');
contents = await fs.readFile(path.join(__dirname, logFiles[2]), 'utf8');
t.match(contents, 'This is the second log message.');
t.end();
});
batch.test('with a max file size and 2 compressed backups', async (t) => {
const testFile = path.join(
__dirname,
'fa-maxFileSize-with-backups-compressed-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`),
]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`),
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 50,
backups: 2,
compress: true,
},
},
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
// give the system a chance to open the stream
await sleep(osDelay);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter((file) =>
file.includes('fa-maxFileSize-with-backups-compressed-test.log')
);
t.equal(logFiles.length, 3, 'should be 3 files');
t.same(logFiles, [
'fa-maxFileSize-with-backups-compressed-test.log',
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
'fa-maxFileSize-with-backups-compressed-test.log.2.gz',
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
t.match(contents, 'This is the fourth log message.');
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[1]))
);
t.match(contents.toString('utf8'), 'This is the third log message.');
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[2]))
);
t.match(contents.toString('utf8'), 'This is the second log message.');
t.end();
});
batch.test('handling of writer.writable', (t) => {
const output = [];
let writable = true;
const RollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
// eslint-disable-next-line class-methods-use-this
on() {}
// eslint-disable-next-line class-methods-use-this
get writable() {
return writable;
}
};
const fileAppender = sandbox.require('../../lib/appenders/file', {
requires: {
streamroller: {
RollingFileStream,
},
},
});
const appender = fileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{
basicLayout(loggingEvent) {
return loggingEvent.data;
},
}
);
t.test('should log when writer.writable=true', (assert) => {
writable = true;
appender({ data: 'something to log' });
assert.ok(output.length, 1);
assert.match(output[output.length - 1], 'something to log');
assert.end();
});
t.test('should not log when writer.writable=false', (assert) => {
writable = false;
appender({ data: 'this should not be logged' });
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], 'this should not be logged');
assert.end();
});
t.end();
});
batch.test('when underlying stream errors', (t) => {
let consoleArgs;
let errorHandler;
const RollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === 'error') {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const fileAppender = sandbox.require('../../lib/appenders/file', {
globals: {
console: {
error(...args) {
consoleArgs = args;
},
},
},
requires: {
streamroller: {
RollingFileStream,
},
},
});
fileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: 'aargh' });
t.test('should log the error to console.error', (assert) => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
'log4js.fileAppender - Writing to file %s, error happened '
);
assert.equal(consoleArgs[1], 'test1.log');
assert.equal(consoleArgs[2].error, 'aargh');
assert.end();
});
t.end();
});
batch.test('with removeColor fileAppender settings', async (t) => {
const testFilePlain = path.join(__dirname, 'fa-removeColor-test.log');
const testFileAsIs = path.join(__dirname, 'fa-asIs-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
});
log4js.configure({
appenders: {
plainFile: { type: 'file', filename: testFilePlain, removeColor: true },
asIsFile: { type: 'file', filename: testFileAsIs, removeColor: false },
},
categories: {
default: { appenders: ['plainFile', 'asIsFile'], level: 'debug' },
},
});
logger.info(
'This should be in the file.',
'\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.',
{},
[]
);
await sleep(osDelay);
let fileContents = await fs.readFile(testFilePlain, 'utf8');
t.match(
fileContents,
`This should be in the file. Color should be plain. {} []${EOL}`
);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
fileContents = await fs.readFile(testFileAsIs, 'utf8');
t.match(
fileContents,
'This should be in the file.',
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`
);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 2] */
const { test } = require('tap');
const fs = require('fs-extra');
const path = require('path');
const sandbox = require('@log4js-node/sandboxed-module');
const zlib = require('zlib');
const util = require('util');
const osDelay = process.platform === 'win32' ? 400 : 200;
const sleep = util.promisify(setTimeout);
const gunzip = util.promisify(zlib.gunzip);
const EOL = require('os').EOL || '\n';
const log4js = require('../../lib/log4js');
const removeFile = async (filename) => {
try {
await fs.unlink(filename);
} catch (e) {
// let's pretend this never happened
}
};
test('log4js fileAppender', (batch) => {
batch.test('with default fileAppender settings', async (t) => {
const testFile = path.join(__dirname, 'fa-default-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
log4js.configure({
appenders: { file: { type: 'file', filename: testFile } },
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This should be in the file.');
await sleep(osDelay);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test('should give error if invalid filename', async (t) => {
const file = '';
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
filename: file,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Invalid filename: ${file}`)
);
const dir = `.${path.sep}`;
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
filename: dir,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Filename is a directory: ${dir}`)
);
t.end();
});
batch.test('should flush logs on shutdown', async (t) => {
const testFile = path.join(__dirname, 'fa-default-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
log4js.configure({
appenders: { test: { type: 'file', filename: testFile } },
categories: { default: { appenders: ['test'], level: 'trace' } },
});
logger.info('1');
logger.info('2');
logger.info('3');
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
const fileContents = await fs.readFile(testFile, 'utf8');
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.test('with a max file size and no backups', async (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-test.log');
const logger = log4js.getLogger('max-file-size');
await removeFile(testFile);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 100,
backups: 0,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
logger.info('This is the first log message.');
logger.info('This is an intermediate log message.');
logger.info('This is the second log message.');
// wait for the file system to catch up
await sleep(osDelay * 2);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, 'This is the second log message.');
t.equal(fileContents.indexOf('This is the first log message.'), -1);
const files = await fs.readdir(__dirname);
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-test.log')
);
t.equal(logFiles.length, 1, 'should be 1 file');
t.end();
});
batch.test('with a max file size in wrong unit mode', async (t) => {
const invalidUnit = '1Z';
const expectedError = new Error(`maxLogSize: "${invalidUnit}" is invalid`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'file',
maxLogSize: invalidUnit,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
expectedError
);
t.end();
});
batch.test('with a max file size in unit mode and no backups', async (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-unit-test.log');
const logger = log4js.getLogger('max-file-size-unit');
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
});
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: '1K',
backups: 0,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
}
logger.info('This is the second log message.');
// wait for the file system to catch up
await sleep(osDelay);
const fileContents = await fs.readFile(testFile, 'utf8');
t.match(fileContents, 'This is the second log message.');
t.notMatch(fileContents, 'These are the log messages for the first file.');
const files = await fs.readdir(__dirname);
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-unit-test.log')
);
t.equal(logFiles.length, 1, 'should be 1 file');
t.end();
});
batch.test('with a max file size and 2 backups', async (t) => {
const testFile = path.join(
__dirname,
'fa-maxFileSize-with-backups-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`),
]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1`),
removeFile(`${testFile}.2`),
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 50,
backups: 2,
},
},
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
// give the system a chance to open the stream
await sleep(osDelay);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter((file) => file.includes('fa-maxFileSize-with-backups-test.log'));
t.equal(logFiles.length, 3);
t.same(logFiles, [
'fa-maxFileSize-with-backups-test.log',
'fa-maxFileSize-with-backups-test.log.1',
'fa-maxFileSize-with-backups-test.log.2',
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
t.match(contents, 'This is the fourth log message.');
contents = await fs.readFile(path.join(__dirname, logFiles[1]), 'utf8');
t.match(contents, 'This is the third log message.');
contents = await fs.readFile(path.join(__dirname, logFiles[2]), 'utf8');
t.match(contents, 'This is the second log message.');
t.end();
});
batch.test('with a max file size and 2 compressed backups', async (t) => {
const testFile = path.join(
__dirname,
'fa-maxFileSize-with-backups-compressed-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`),
]);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await Promise.all([
removeFile(testFile),
removeFile(`${testFile}.1.gz`),
removeFile(`${testFile}.2.gz`),
]);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: {
type: 'file',
filename: testFile,
maxLogSize: 50,
backups: 2,
compress: true,
},
},
categories: { default: { appenders: ['file'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
// give the system a chance to open the stream
await sleep(osDelay);
const files = await fs.readdir(__dirname);
const logFiles = files
.sort()
.filter((file) =>
file.includes('fa-maxFileSize-with-backups-compressed-test.log')
);
t.equal(logFiles.length, 3, 'should be 3 files');
t.same(logFiles, [
'fa-maxFileSize-with-backups-compressed-test.log',
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
'fa-maxFileSize-with-backups-compressed-test.log.2.gz',
]);
let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
t.match(contents, 'This is the fourth log message.');
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[1]))
);
t.match(contents.toString('utf8'), 'This is the third log message.');
contents = await gunzip(
await fs.readFile(path.join(__dirname, logFiles[2]))
);
t.match(contents.toString('utf8'), 'This is the second log message.');
t.end();
});
batch.test('handling of writer.writable', (t) => {
const output = [];
let writable = true;
const RollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
// eslint-disable-next-line class-methods-use-this
on() {}
// eslint-disable-next-line class-methods-use-this
get writable() {
return writable;
}
};
const fileAppender = sandbox.require('../../lib/appenders/file', {
requires: {
streamroller: {
RollingFileStream,
},
},
});
const appender = fileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{
basicLayout(loggingEvent) {
return loggingEvent.data;
},
}
);
t.test('should log when writer.writable=true', (assert) => {
writable = true;
appender({ data: 'something to log' });
assert.ok(output.length, 1);
assert.match(output[output.length - 1], 'something to log');
assert.end();
});
t.test('should not log when writer.writable=false', (assert) => {
writable = false;
appender({ data: 'this should not be logged' });
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], 'this should not be logged');
assert.end();
});
t.end();
});
batch.test('when underlying stream errors', (t) => {
let consoleArgs;
let errorHandler;
const RollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === 'error') {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const fileAppender = sandbox.require('../../lib/appenders/file', {
globals: {
console: {
error(...args) {
consoleArgs = args;
},
},
},
requires: {
streamroller: {
RollingFileStream,
},
},
});
fileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: 'aargh' });
t.test('should log the error to console.error', (assert) => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
'log4js.fileAppender - Writing to file %s, error happened '
);
assert.equal(consoleArgs[1], 'test1.log');
assert.equal(consoleArgs[2].error, 'aargh');
assert.end();
});
t.end();
});
batch.test('with removeColor fileAppender settings', async (t) => {
const testFilePlain = path.join(__dirname, 'fa-removeColor-test.log');
const testFileAsIs = path.join(__dirname, 'fa-asIs-test.log');
const logger = log4js.getLogger('default-settings');
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
t.teardown(async () => {
await new Promise((resolve) => {
log4js.shutdown(resolve);
});
await removeFile(testFilePlain);
await removeFile(testFileAsIs);
});
log4js.configure({
appenders: {
plainFile: { type: 'file', filename: testFilePlain, removeColor: true },
asIsFile: { type: 'file', filename: testFileAsIs, removeColor: false },
},
categories: {
default: { appenders: ['plainFile', 'asIsFile'], level: 'debug' },
},
});
logger.info(
'This should be in the file.',
'\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.',
{},
[]
);
await sleep(osDelay);
let fileContents = await fs.readFile(testFilePlain, 'utf8');
t.match(
fileContents,
`This should be in the file. Color should be plain. {} []${EOL}`
);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
fileContents = await fs.readFile(testFileAsIs, 'utf8');
t.match(
fileContents,
'This should be in the file.',
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`
);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./examples/loggly-appender.js | // Note that loggly appender needs node-loggly to work.
// If you haven't got node-loggly installed, you'll get cryptic
// "cannot find module" errors when using the loggly appender
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
console: {
type: 'console',
},
loggly: {
type: 'loggly',
token: '12345678901234567890',
subdomain: 'your-subdomain',
tags: ['test'],
},
},
categories: {
default: { appenders: ['console'], level: 'info' },
loggly: { appenders: ['loggly'], level: 'info' },
},
});
const logger = log4js.getLogger('loggly');
logger.info('Test log message');
// logger.debug("Test log message");
| // Note that loggly appender needs node-loggly to work.
// If you haven't got node-loggly installed, you'll get cryptic
// "cannot find module" errors when using the loggly appender
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
console: {
type: 'console',
},
loggly: {
type: 'loggly',
token: '12345678901234567890',
subdomain: 'your-subdomain',
tags: ['test'],
},
},
categories: {
default: { appenders: ['console'], level: 'info' },
loggly: { appenders: ['loggly'], level: 'info' },
},
});
const logger = log4js.getLogger('loggly');
logger.info('Test log message');
// logger.debug("Test log message");
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./examples/logstashUDP.js | const log4js = require('../lib/log4js');
/*
Sample logstash config:
udp {
codec => json
port => 10001
queue_size => 2
workers => 2
type => myAppType
}
*/
log4js.configure({
appenders: {
console: {
type: 'console',
},
logstash: {
host: '127.0.0.1',
port: 10001,
type: 'logstashUDP',
logType: 'myAppType', // Optional, defaults to 'category'
fields: {
// Optional, will be added to the 'fields' object in logstash
field1: 'value1',
field2: 'value2',
},
layout: {
type: 'pattern',
pattern: '%m',
},
},
},
categories: {
default: { appenders: ['console', 'logstash'], level: 'info' },
},
});
const logger = log4js.getLogger('myLogger');
logger.info('Test log message %s', 'arg1', 'arg2');
| const log4js = require('../lib/log4js');
/*
Sample logstash config:
udp {
codec => json
port => 10001
queue_size => 2
workers => 2
type => myAppType
}
*/
log4js.configure({
appenders: {
console: {
type: 'console',
},
logstash: {
host: '127.0.0.1',
port: 10001,
type: 'logstashUDP',
logType: 'myAppType', // Optional, defaults to 'category'
fields: {
// Optional, will be added to the 'fields' object in logstash
field1: 'value1',
field2: 'value2',
},
layout: {
type: 'pattern',
pattern: '%m',
},
},
},
categories: {
default: { appenders: ['console', 'logstash'], level: 'info' },
},
});
const logger = log4js.getLogger('myLogger');
logger.info('Test log message %s', 'arg1', 'arg2');
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/multiprocess-worker.js | if (process.argv.indexOf('start-multiprocess-worker') >= 0) {
const log4js = require('../lib/log4js');
const port = parseInt(process.argv[process.argv.length - 1], 10);
log4js.configure({
appenders: {
multi: { type: 'multiprocess', mode: 'worker', loggerPort: port },
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
log4js.getLogger('worker').info('Logging from worker');
log4js.shutdown(() => {
process.send('worker is done');
});
}
| if (process.argv.indexOf('start-multiprocess-worker') >= 0) {
const log4js = require('../lib/log4js');
const port = parseInt(process.argv[process.argv.length - 1], 10);
log4js.configure({
appenders: {
multi: { type: 'multiprocess', mode: 'worker', loggerPort: port },
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
log4js.getLogger('worker').info('Logging from worker');
log4js.shutdown(() => {
process.send('worker is done');
});
}
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./examples/flush-on-exit.js | /**
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
* another shell)
*/
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
cheese: { type: 'file', filename: 'cheese.log' },
},
categories: {
default: { appenders: ['cheese'], level: 'debug' },
},
});
const logger = log4js.getLogger('cheese');
const http = require('http');
http
.createServer((request, response) => {
response.writeHead(200, { 'Content-Type': 'text/plain' });
const rd = Math.random() * 50;
logger.info(`hello ${rd}`);
response.write('hello ');
if (Math.floor(rd) === 30) {
log4js.shutdown(() => {
process.exit(1);
});
}
response.end();
})
.listen(4444);
| /**
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
* another shell)
*/
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
cheese: { type: 'file', filename: 'cheese.log' },
},
categories: {
default: { appenders: ['cheese'], level: 'debug' },
},
});
const logger = log4js.getLogger('cheese');
const http = require('http');
http
.createServer((request, response) => {
response.writeHead(200, { 'Content-Type': 'text/plain' });
const rd = Math.random() * 50;
logger.info(`hello ${rd}`);
response.write('hello ');
if (Math.floor(rd) === 30) {
log4js.shutdown(() => {
process.exit(1);
});
}
response.end();
})
.listen(4444);
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/LoggingEvent-test.js | const flatted = require('flatted');
const { test } = require('tap');
const LoggingEvent = require('../../lib/LoggingEvent');
const levels = require('../../lib/levels');
test('LoggingEvent', (batch) => {
batch.test('should serialise to flatted', (t) => {
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message', parseInt('abc', 10), 1 / 0, -1 / 0, undefined],
{
user: 'bob',
}
);
// set the event date to a known value
event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
const rehydratedEvent = flatted.parse(event.serialise());
t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
t.equal(rehydratedEvent.categoryName, 'cheese');
t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
t.equal(rehydratedEvent.data.length, 5);
t.equal(rehydratedEvent.data[0], 'log message');
t.equal(rehydratedEvent.data[1], 'NaN');
t.equal(rehydratedEvent.data[2], 'Infinity');
t.equal(rehydratedEvent.data[3], '-Infinity');
t.equal(rehydratedEvent.data[4], 'undefined');
t.equal(rehydratedEvent.context.user, 'bob');
t.end();
});
batch.test('should deserialise from flatted', (t) => {
const dehydratedEvent = flatted.stringify({
startTime: '2018-02-04T10:25:23.010Z',
categoryName: 'biscuits',
level: {
levelStr: 'INFO',
},
data: ['some log message', { x: 1 }],
context: { thing: 'otherThing' },
pid: '1234',
functionName: 'bound',
fileName: 'domain.js',
lineNumber: 421,
columnNumber: 15,
callStack: 'at bound (domain.js:421:15)\n',
});
const event = LoggingEvent.deserialise(dehydratedEvent);
t.type(event, LoggingEvent);
t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
t.equal(event.categoryName, 'biscuits');
t.same(event.level, levels.INFO);
t.equal(event.data[0], 'some log message');
t.equal(event.data[1].x, 1);
t.equal(event.context.thing, 'otherThing');
t.equal(event.pid, '1234');
t.equal(event.functionName, 'bound');
t.equal(event.fileName, 'domain.js');
t.equal(event.lineNumber, 421);
t.equal(event.columnNumber, 15);
t.equal(event.callStack, 'at bound (domain.js:421:15)\n');
t.end();
});
batch.test('Should correct construct with/without location info', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = '';
const functionName = '';
const functionAlias = '';
const callerName = '';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], {
user: 'bob',
});
t.equal(event2.fileName, undefined);
t.equal(event2.lineNumber, undefined);
t.equal(event2.columnNumber, undefined);
t.equal(event2.callStack, undefined);
t.equal(event2.className, undefined);
t.equal(event2.functionName, undefined);
t.equal(event2.functionAlias, undefined);
t.equal(event2.callerName, undefined);
t.end();
});
batch.test('Should contain class, method and alias names', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
t.end();
});
batch.end();
});
| const flatted = require('flatted');
const { test } = require('tap');
const LoggingEvent = require('../../lib/LoggingEvent');
const levels = require('../../lib/levels');
test('LoggingEvent', (batch) => {
batch.test('should serialise to flatted', (t) => {
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message', parseInt('abc', 10), 1 / 0, -1 / 0, undefined],
{
user: 'bob',
}
);
// set the event date to a known value
event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
const rehydratedEvent = flatted.parse(event.serialise());
t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
t.equal(rehydratedEvent.categoryName, 'cheese');
t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
t.equal(rehydratedEvent.data.length, 5);
t.equal(rehydratedEvent.data[0], 'log message');
t.equal(rehydratedEvent.data[1], 'NaN');
t.equal(rehydratedEvent.data[2], 'Infinity');
t.equal(rehydratedEvent.data[3], '-Infinity');
t.equal(rehydratedEvent.data[4], 'undefined');
t.equal(rehydratedEvent.context.user, 'bob');
t.end();
});
batch.test('should deserialise from flatted', (t) => {
const dehydratedEvent = flatted.stringify({
startTime: '2018-02-04T10:25:23.010Z',
categoryName: 'biscuits',
level: {
levelStr: 'INFO',
},
data: ['some log message', { x: 1 }],
context: { thing: 'otherThing' },
pid: '1234',
functionName: 'bound',
fileName: 'domain.js',
lineNumber: 421,
columnNumber: 15,
callStack: 'at bound (domain.js:421:15)\n',
});
const event = LoggingEvent.deserialise(dehydratedEvent);
t.type(event, LoggingEvent);
t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
t.equal(event.categoryName, 'biscuits');
t.same(event.level, levels.INFO);
t.equal(event.data[0], 'some log message');
t.equal(event.data[1].x, 1);
t.equal(event.context.thing, 'otherThing');
t.equal(event.pid, '1234');
t.equal(event.functionName, 'bound');
t.equal(event.fileName, 'domain.js');
t.equal(event.lineNumber, 421);
t.equal(event.columnNumber, 15);
t.equal(event.callStack, 'at bound (domain.js:421:15)\n');
t.end();
});
batch.test('Should correct construct with/without location info', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = '';
const functionName = '';
const functionAlias = '';
const callerName = '';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], {
user: 'bob',
});
t.equal(event2.fileName, undefined);
t.equal(event2.lineNumber, undefined);
t.equal(event2.columnNumber, undefined);
t.equal(event2.callStack, undefined);
t.equal(event2.className, undefined);
t.equal(event2.functionName, undefined);
t.equal(event2.functionAlias, undefined);
t.equal(event2.callerName, undefined);
t.end();
});
// Checks that location info parsed from a call stack (class, method and
// alias names) is copied verbatim onto the LoggingEvent.
batch.test('Should contain class, method and alias names', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
// Fixture stack captured from a REPL session where Foo.bar was invoked
// via the alias baz -- the first frame is what the fields below describe.
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
// The fifth constructor argument carries the location info.
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
// Every location field must land on the event unchanged.
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/multiprocess-shutdown-test.js | const { test } = require('tap');
const net = require('net');
const childProcess = require('child_process');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
// Verifies that log4js.shutdown() closes the multiprocess master's TCP
// listener: once shutdown has completed, connecting to the logger port
// must fail with a connection error.
test('multiprocess appender shutdown (master)', { timeout: 10000 }, (t) => {
log4js.configure({
appenders: {
stdout: { type: 'stdout' },
multi: {
type: 'multiprocess',
mode: 'master',
loggerPort: 12345,
appender: 'stdout',
},
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
// Give the master a second to start listening before shutting down.
setTimeout(() => {
log4js.shutdown(() => {
// Allow time for the server socket to fully close, then probe the port.
setTimeout(() => {
net
.connect({ port: 12345 }, () => {
t.fail('connection should not still work');
t.end();
})
.on('error', (err) => {
// A connection error (e.g. ECONNREFUSED) proves the listener is gone.
t.ok(err, 'we got a connection error');
t.end();
});
}, 1000);
});
}, 1000);
});
// Verifies worker-mode shutdown behaviour: events logged before the TCP
// connection to the master is up must be buffered, flushed on 'connect',
// and shutdown must wait for the socket to be ended before completing.
test('multiprocess appender shutdown (worker)', (t) => {
// Hand-rolled stand-in for a net.Socket: records registered listeners,
// written payloads, and whether removeAllListeners()/end() were called.
const fakeConnection = {
evts: {},
msgs: [],
on(evt, cb) {
this.evts[evt] = cb;
},
write(data) {
this.msgs.push(data);
},
removeAllListeners() {
this.removeAllListenersCalled = true;
},
end(cb) {
this.endCb = cb;
},
};
// Load log4js with the `net` module stubbed out so the worker appender
// talks to fakeConnection instead of a real socket.
const logLib = sandbox.require('../../lib/log4js', {
requires: {
net: {
createConnection() {
return fakeConnection;
},
},
},
});
logLib.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'debug' } },
});
logLib
.getLogger()
.info(
'Putting something in the buffer before the connection is established'
);
// nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
let shutdownFinished = false;
logLib.shutdown(() => {
shutdownFinished = true;
});
// still nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
// Simulate the socket finally connecting: the buffered event should now
// be flushed and the pending shutdown allowed to proceed.
fakeConnection.evts.connect();
setTimeout(() => {
// Two payloads written -- NOTE(review): presumably the serialised event
// plus the appender's message delimiter; confirm against the
// multiprocess appender's framing.
t.equal(fakeConnection.msgs.length, 2);
t.ok(fakeConnection.removeAllListenersCalled);
// Invoking the socket's end() callback should unblock shutdown.
fakeConnection.endCb();
t.ok(shutdownFinished);
t.end();
}, 500);
});
// Verifies the master keeps running when a worker process that logs over
// TCP is killed, and that the worker's event was recorded before death.
test('multiprocess appender crash (worker)', (t) => {
const loggerPort = 12346;
// The recording appender stores events in memory for later inspection.
const vcr = require('../../lib/appenders/recording');
log4js.configure({
appenders: {
console: { type: 'recording' },
multi: {
type: 'multiprocess',
mode: 'master',
loggerPort,
appender: 'console',
},
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
// Spawn a child process that connects to the master and logs an event.
const worker = childProcess.fork(require.resolve('../multiprocess-worker'), [
'start-multiprocess-worker',
loggerPort,
]);
worker.on('message', (m) => {
if (m === 'worker is done') {
// Short delay lets the logged event reach the master before the kill.
setTimeout(() => {
worker.kill();
t.equal(vcr.replay()[0].data[0], 'Logging from worker');
log4js.shutdown(() => t.end());
}, 100);
}
});
});
| const { test } = require('tap');
const net = require('net');
const childProcess = require('child_process');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
test('multiprocess appender shutdown (master)', { timeout: 10000 }, (t) => {
log4js.configure({
appenders: {
stdout: { type: 'stdout' },
multi: {
type: 'multiprocess',
mode: 'master',
loggerPort: 12345,
appender: 'stdout',
},
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
setTimeout(() => {
log4js.shutdown(() => {
setTimeout(() => {
net
.connect({ port: 12345 }, () => {
t.fail('connection should not still work');
t.end();
})
.on('error', (err) => {
t.ok(err, 'we got a connection error');
t.end();
});
}, 1000);
});
}, 1000);
});
test('multiprocess appender shutdown (worker)', (t) => {
const fakeConnection = {
evts: {},
msgs: [],
on(evt, cb) {
this.evts[evt] = cb;
},
write(data) {
this.msgs.push(data);
},
removeAllListeners() {
this.removeAllListenersCalled = true;
},
end(cb) {
this.endCb = cb;
},
};
const logLib = sandbox.require('../../lib/log4js', {
requires: {
net: {
createConnection() {
return fakeConnection;
},
},
},
});
logLib.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'debug' } },
});
logLib
.getLogger()
.info(
'Putting something in the buffer before the connection is established'
);
// nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
let shutdownFinished = false;
logLib.shutdown(() => {
shutdownFinished = true;
});
// still nothing been written yet.
t.equal(fakeConnection.msgs.length, 0);
fakeConnection.evts.connect();
setTimeout(() => {
t.equal(fakeConnection.msgs.length, 2);
t.ok(fakeConnection.removeAllListenersCalled);
fakeConnection.endCb();
t.ok(shutdownFinished);
t.end();
}, 500);
});
test('multiprocess appender crash (worker)', (t) => {
const loggerPort = 12346;
const vcr = require('../../lib/appenders/recording');
log4js.configure({
appenders: {
console: { type: 'recording' },
multi: {
type: 'multiprocess',
mode: 'master',
loggerPort,
appender: 'console',
},
},
categories: { default: { appenders: ['multi'], level: 'debug' } },
});
const worker = childProcess.fork(require.resolve('../multiprocess-worker'), [
'start-multiprocess-worker',
loggerPort,
]);
worker.on('message', (m) => {
if (m === 'worker is done') {
setTimeout(() => {
worker.kill();
t.equal(vcr.replay()[0].data[0], 'Logging from worker');
log4js.shutdown(() => t.end());
}, 100);
}
});
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/recording.md | # Recording Appender
This appender stores the log events in memory. It is mainly useful for testing (see the tests for the category filter, for instance).
## Configuration
- `type` - `recording`
There is no other configuration for this appender.
## Usage
The array that stores log events is shared across all recording appender instances, and is accessible from the recording module. `require('<LOG4JS LIB DIR>/appenders/recording')` returns a module with the following functions exported:
- `replay` - returns `Array<LogEvent>` - get all the events recorded.
- `playback` - synonym for `replay`
- `reset` - clears the array of events recorded.
- `erase` - synonym for `reset`
## Example
```javascript
const recording = require("log4js/lib/appenders/recording");
const log4js = require("log4js");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "info" } },
});
const logger = log4js.getLogger();
logger.info("some log event");
const events = recording.replay(); // events is an array of LogEvent objects.
recording.erase(); // clear the appender's array.
```
| # Recording Appender
This appender stores the log events in memory. It is mainly useful for testing (see the tests for the category filter, for instance).
## Configuration
- `type` - `recording`
There is no other configuration for this appender.
## Usage
The array that stores log events is shared across all recording appender instances, and is accessible from the recording module. `require('<LOG4JS LIB DIR>/appenders/recording')` returns a module with the following functions exported:
- `replay` - returns `Array<LogEvent>` - get all the events recorded.
- `playback` - synonym for `replay`
- `reset` - clears the array of events recorded.
- `erase` - synonym for `reset`
## Example
```javascript
const recording = require("log4js/lib/appenders/recording");
const log4js = require("log4js");
log4js.configure({
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "info" } },
});
const logger = log4js.getLogger();
logger.info("some log event");
const events = recording.replay(); // events is an array of LogEvent objects.
recording.erase(); // clear the appender's array.
```
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/contributors.md | # Contributors
Many people have helped make log4js what it is today. Here's a list of everyone who has contributed to the code. There are lots of people who've helped by submitting bug reports or pull requests that I haven't merged, but I have used their ideas to implement a different way. Thanks to you all. This library also owes a huge amount to the [original log4js project](https://github.com/stritti/log4js). If you'd like to help out, take a look at the [contributor guidelines](contrib-guidelines.md).
<ul>
{% for contributor in site.github.contributors %}
<li><a href="{{ contributor.html_url }}">{{ contributor.login }}</a></li>
{% endfor %}
</ul>
| # Contributors
Many people have helped make log4js what it is today. Here's a list of everyone who has contributed to the code. There are lots of people who've helped by submitting bug reports or pull requests that I haven't merged, but I have used their ideas to implement a different way. Thanks to you all. This library also owes a huge amount to the [original log4js project](https://github.com/stritti/log4js). If you'd like to help out, take a look at the [contributor guidelines](contrib-guidelines.md).
<ul>
{% for contributor in site.github.contributors %}
<li><a href="{{ contributor.html_url }}">{{ contributor.login }}</a></li>
{% endfor %}
</ul>
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/setLevel-asymmetry-test.js | // This test shows an asymmetry between setLevel and isLevelEnabled
// (in log4js-node@0.4.3 and earlier):
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
// does not (sets the level to TRACE).
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
//
const { test } = require('tap');
const log4js = require('../../lib/log4js');
const logger = log4js.getLogger('test-setLevel-asymmetry');
// Define the array of levels as string to iterate over.
const strLevels = ['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal'];
const log4jsLevels = strLevels.map(log4js.levels.getLevel);
// For every named level, setting logger.level (by string or by Level
// instance) must enable exactly the levels at or above it -- i.e.
// isLevelEnabled must agree with isLessThanOrEqualTo on the set level.
test('log4js setLevel', (batch) => {
  for (const strLevel of strLevels) {
    batch.test(`is called with a ${strLevel} as string`, (t) => {
      const log4jsLevel = log4js.levels.getLevel(strLevel);
      // Assign `value` to logger.level, then confirm each known level is
      // enabled iff it is at or above the level that was just set.
      const verifyEnabledLevels = (assert, value) => {
        logger.level = value;
        for (const level of log4jsLevels) {
          assert.equal(
            logger.isLevelEnabled(level),
            log4jsLevel.isLessThanOrEqualTo(level)
          );
        }
        assert.end();
      };
      t.test('should convert string to level correctly', (assert) => {
        verifyEnabledLevels(assert, strLevel);
      });
      t.test('should also accept a Level', (assert) => {
        verifyEnabledLevels(assert, log4jsLevel);
      });
      t.end();
    });
  }
  batch.end();
});
| // This test shows an asymmetry between setLevel and isLevelEnabled
// (in log4js-node@0.4.3 and earlier):
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
// does not (sets the level to TRACE).
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
//
const { test } = require('tap');
const log4js = require('../../lib/log4js');
const logger = log4js.getLogger('test-setLevel-asymmetry');
// Define the array of levels as string to iterate over.
const strLevels = ['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal'];
const log4jsLevels = strLevels.map(log4js.levels.getLevel);
test('log4js setLevel', (batch) => {
strLevels.forEach((strLevel) => {
batch.test(`is called with a ${strLevel} as string`, (t) => {
const log4jsLevel = log4js.levels.getLevel(strLevel);
t.test('should convert string to level correctly', (assert) => {
logger.level = strLevel;
log4jsLevels.forEach((level) => {
assert.equal(
logger.isLevelEnabled(level),
log4jsLevel.isLessThanOrEqualTo(level)
);
});
assert.end();
});
t.test('should also accept a Level', (assert) => {
logger.level = log4jsLevel;
log4jsLevels.forEach((level) => {
assert.equal(
logger.isLevelEnabled(level),
log4jsLevel.isLessThanOrEqualTo(level)
);
});
assert.end();
});
t.end();
});
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./examples/example.js | 'use strict';
// Demo script: routes different logger categories to different appenders.
// 'cheese' writes to cheese.log only (error and above), 'another' writes
// to the console only, and all other categories use the default config
// (console + cheese.log, trace and above).
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/writing-appenders.md | # Writing Appenders for Log4js
Log4js can load appenders from outside its core set. To add a custom appender, the easiest way is to make it a stand-alone module and publish to npm. You can also load appenders from your own application, but they must be defined in a module.
## Loading mechanism
When log4js parses your configuration, it loops through the defined appenders. For each one, it will `require` the appender initially using the `type` value prepended with './appenders' as the module identifier - this is to try loading from the core appenders first. If that fails (the module could not be found in the core appenders), then log4js will try to require the module using variations of the `type` value.
Log4js checks the following places (in this order) for appenders based on the type value:
1. Bundled core appenders (within appenders directory): `require('./' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If that fails, an error will be raised.
## Appender Modules
An appender module should export a single function called `configure`. The function should accept the following arguments:
- `config` - `object` - the appender's configuration object
- `layouts` - `module` - gives access to the [layouts](layouts.md) module, which most appenders will need
- `layout` - `function(type, config)` - this is the main function that appenders will use to find a layout
- `findAppender` - `function(name)` - if your appender is a wrapper around another appender (like the [logLevelFilter](logLevelFilter.md) for example), this function can be used to find another appender by name
- `levels` - `module` - gives access to the [levels](levels.md) module, which most appenders will need
`configure` should return a function which accepts a logEvent, which is the appender itself. One of the simplest examples is the [stdout](stdout.md) appender. Let's run through the code.
## Example
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
// stdout configure doesn't need to use findAppender, or levels
function configure(config, layouts) {
// the default layout for the appender
let layout = layouts.colouredLayout;
// check if there is another layout specified
if (config.layout) {
// load the layout
layout = layouts.layout(config.layout.type, config.layout);
}
//create a new appender instance
return stdoutAppender(layout, config.timezoneOffset);
}
//export the only function needed
exports.configure = configure;
```
# Shutdown functions
It's a good idea to implement a `shutdown` function on your appender instances. This function will get called by `log4js.shutdown` and signals that `log4js` has been asked to stop logging. Usually this is because of a fatal exception, or the application is being stopped. Your shutdown function should make sure that all asynchronous operations finish, and that any resources are cleaned up. The function must be named `shutdown`, take one callback argument, and be a property of the appender instance. Let's add a shutdown function to the `stdout` appender as an example.
## Example (shutdown)
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
const appender = (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
// add a shutdown function.
appender.shutdown = (done) => {
process.stdout.write("", done);
};
return appender;
}
// ... rest of the code as above
```
| # Writing Appenders for Log4js
Log4js can load appenders from outside its core set. To add a custom appender, the easiest way is to make it a stand-alone module and publish to npm. You can also load appenders from your own application, but they must be defined in a module.
## Loading mechanism
When log4js parses your configuration, it loops through the defined appenders. For each one, it will `require` the appender initially using the `type` value prepended with './appenders' as the module identifier - this is to try loading from the core appenders first. If that fails (the module could not be found in the core appenders), then log4js will try to require the module using variations of the `type` value.
Log4js checks the following places (in this order) for appenders based on the type value:
1. Bundled core appenders (within appenders directory): `require('./' + type)`
2. node_modules: `require(type)`
3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)`
4. relative to the process' current working directory: `require(process.cwd() + '/' + type)`
If that fails, an error will be raised.
## Appender Modules
An appender module should export a single function called `configure`. The function should accept the following arguments:
- `config` - `object` - the appender's configuration object
- `layouts` - `module` - gives access to the [layouts](layouts.md) module, which most appenders will need
- `layout` - `function(type, config)` - this is the main function that appenders will use to find a layout
- `findAppender` - `function(name)` - if your appender is a wrapper around another appender (like the [logLevelFilter](logLevelFilter.md) for example), this function can be used to find another appender by name
- `levels` - `module` - gives access to the [levels](levels.md) module, which most appenders will need
`configure` should return a function which accepts a logEvent, which is the appender itself. One of the simplest examples is the [stdout](stdout.md) appender. Let's run through the code.
## Example
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
return (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
}
// stdout configure doesn't need to use findAppender, or levels
function configure(config, layouts) {
// the default layout for the appender
let layout = layouts.colouredLayout;
// check if there is another layout specified
if (config.layout) {
// load the layout
layout = layouts.layout(config.layout.type, config.layout);
}
//create a new appender instance
return stdoutAppender(layout, config.timezoneOffset);
}
//export the only function needed
exports.configure = configure;
```
# Shutdown functions
It's a good idea to implement a `shutdown` function on your appender instances. This function will get called by `log4js.shutdown` and signals that `log4js` has been asked to stop logging. Usually this is because of a fatal exception, or the application is being stopped. Your shutdown function should make sure that all asynchronous operations finish, and that any resources are cleaned up. The function must be named `shutdown`, take one callback argument, and be a property of the appender instance. Let's add a shutdown function to the `stdout` appender as an example.
## Example (shutdown)
```javascript
// This is the function that generates an appender function
function stdoutAppender(layout, timezoneOffset) {
// This is the appender function itself
const appender = (loggingEvent) => {
process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`);
};
// add a shutdown function.
appender.shutdown = (done) => {
process.stdout.write("", done);
};
return appender;
}
// ... rest of the code as above
```
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/stdout.md | # Standard Output Appender
This appender writes all log events to the standard output stream. It is the default appender for log4js.
# Configuration
- `type` - `stdout`
- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout" } },
categories: { default: { appenders: ["out"], level: "info" } },
});
```
| # Standard Output Appender
This appender writes all log events to the standard output stream. It is the default appender for log4js.
# Configuration
- `type` - `stdout`
- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md)
# Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout" } },
categories: { default: { appenders: ["out"], level: "info" } },
});
```
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./lib/appenders/logLevelFilter.js | function logLevelFilter(minLevelString, maxLevelString, appender, levels) {
const minLevel = levels.getLevel(minLevelString);
const maxLevel = levels.getLevel(maxLevelString, levels.FATAL);
return (logEvent) => {
const eventLevel = logEvent.level;
if (
minLevel.isLessThanOrEqualTo(eventLevel) &&
maxLevel.isGreaterThanOrEqualTo(eventLevel)
) {
appender(logEvent);
}
};
}
/**
 * Configure hook for the logLevelFilter appender type.
 *
 * @param {Object} config - appender config; uses `appender` (name of the
 *   wrapped appender), `level` (minimum) and optional `maxLevel` (maximum).
 * @param {Object} layouts - unused; layout is the wrapped appender's concern.
 * @param {Function} findAppender - resolves an appender instance by name.
 * @param {Object} levels - the log4js levels module.
 * @returns {Function} the filtering appender function.
 */
function configure(config, layouts, findAppender, levels) {
  // Look up the wrapped appender by name, then gate it by the level range.
  return logLevelFilter(
    config.level,
    config.maxLevel,
    findAppender(config.appender),
    levels
  );
}
module.exports.configure = configure;
| function logLevelFilter(minLevelString, maxLevelString, appender, levels) {
// Resolve the configured level names; getLevel's second argument supplies
// the fallback, so an absent/unknown maxLevel becomes FATAL.
const minLevel = levels.getLevel(minLevelString);
const maxLevel = levels.getLevel(maxLevelString, levels.FATAL);
// The returned function is the appender: forward only events whose level
// falls within [minLevel, maxLevel]; drop everything else silently.
return (logEvent) => {
const eventLevel = logEvent.level;
if (
minLevel.isLessThanOrEqualTo(eventLevel) &&
maxLevel.isGreaterThanOrEqualTo(eventLevel)
) {
appender(logEvent);
}
};
}
// Configure hook: resolve the wrapped appender by name and wrap it in a
// level-range filter. `layouts` is unused -- layout is the wrapped
// appender's concern.
function configure(config, layouts, findAppender, levels) {
const appender = findAppender(config.appender);
return logLevelFilter(config.level, config.maxLevel, appender, levels);
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/levels-test.js | const { test } = require('tap');
const levels = require('../../lib/levels');
/**
 * Builds a small fluent assertion helper around a log4js Level instance.
 * Each returned method takes an array of other levels and asserts the
 * corresponding comparison (isLessThanOrEqualTo / isGreaterThanOrEqualTo /
 * isEqualTo) between `level` and every element, via tap's ok/notOk.
 */
function assertThat(assert, level) {
  // Apply `comparison` between `level` and each entry, feeding the boolean
  // result into the supplied tap assertion (assert.ok or assert.notOk).
  const checkEach = (assertion, comparison, otherLevels) => {
    for (const other of otherLevels) {
      assertion.call(assert, comparison.call(level, other));
    }
  };
  return {
    isLessThanOrEqualTo(lvls) {
      checkEach(assert.ok, level.isLessThanOrEqualTo, lvls);
    },
    isNotLessThanOrEqualTo(lvls) {
      checkEach(assert.notOk, level.isLessThanOrEqualTo, lvls);
    },
    isGreaterThanOrEqualTo(lvls) {
      checkEach(assert.ok, level.isGreaterThanOrEqualTo, lvls);
    },
    isNotGreaterThanOrEqualTo(lvls) {
      checkEach(assert.notOk, level.isGreaterThanOrEqualTo, lvls);
    },
    isEqualTo(lvls) {
      checkEach(assert.ok, level.isEqualTo, lvls);
    },
    isNotEqualTo(lvls) {
      checkEach(assert.notOk, level.isEqualTo, lvls);
    },
  };
}
test('levels', (batch) => {
batch.test('values', (t) => {
t.test('should define some levels', (assert) => {
assert.ok(levels.ALL);
assert.ok(levels.TRACE);
assert.ok(levels.DEBUG);
assert.ok(levels.INFO);
assert.ok(levels.WARN);
assert.ok(levels.ERROR);
assert.ok(levels.FATAL);
assert.ok(levels.MARK);
assert.ok(levels.OFF);
assert.end();
});
t.test('ALL', (assert) => {
const all = levels.ALL;
assertThat(assert, all).isLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, all).isNotGreaterThanOrEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, all).isEqualTo([levels.getLevel('ALL')]);
assertThat(assert, all).isNotEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('TRACE', (assert) => {
const trace = levels.TRACE;
assertThat(assert, trace).isLessThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, trace).isNotLessThanOrEqualTo([levels.ALL]);
assertThat(assert, trace).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, trace).isNotGreaterThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, trace).isEqualTo([levels.getLevel('TRACE')]);
assertThat(assert, trace).isNotEqualTo([
levels.ALL,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('DEBUG', (assert) => {
const debug = levels.DEBUG;
assertThat(assert, debug).isLessThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, debug).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, debug).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, debug).isNotGreaterThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, debug).isEqualTo([levels.getLevel('DEBUG')]);
assertThat(assert, debug).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('INFO', (assert) => {
const info = levels.INFO;
assertThat(assert, info).isLessThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, info).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
]);
assertThat(assert, info).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
]);
assertThat(assert, info).isNotGreaterThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, info).isEqualTo([levels.getLevel('INFO')]);
assertThat(assert, info).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('WARN', (assert) => {
const warn = levels.WARN;
assertThat(assert, warn).isLessThanOrEqualTo([
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, warn).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
]);
assertThat(assert, warn).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
]);
assertThat(assert, warn).isNotGreaterThanOrEqualTo([
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, warn).isEqualTo([levels.getLevel('WARN')]);
assertThat(assert, warn).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.ERROR,
levels.FATAL,
levels.OFF,
]);
assert.end();
});
t.test('ERROR', (assert) => {
const error = levels.ERROR;
assertThat(assert, error).isLessThanOrEqualTo([
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, error).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
]);
assertThat(assert, error).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
]);
assertThat(assert, error).isNotGreaterThanOrEqualTo([
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, error).isEqualTo([levels.getLevel('ERROR')]);
assertThat(assert, error).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('FATAL', (assert) => {
const fatal = levels.FATAL;
assertThat(assert, fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
assertThat(assert, fatal).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
]);
assertThat(assert, fatal).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
]);
assertThat(assert, fatal).isNotGreaterThanOrEqualTo([
levels.MARK,
levels.OFF,
]);
assertThat(assert, fatal).isEqualTo([levels.getLevel('FATAL')]);
assertThat(assert, fatal).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('MARK', (assert) => {
const mark = levels.MARK;
assertThat(assert, mark).isLessThanOrEqualTo([levels.OFF]);
assertThat(assert, mark).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.FATAL,
levels.ERROR,
]);
assertThat(assert, mark).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
]);
assertThat(assert, mark).isNotGreaterThanOrEqualTo([levels.OFF]);
assertThat(assert, mark).isEqualTo([levels.getLevel('MARK')]);
assertThat(assert, mark).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF,
]);
assert.end();
});
t.test('OFF', (assert) => {
const off = levels.OFF;
assertThat(assert, off).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assertThat(assert, off).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assertThat(assert, off).isEqualTo([levels.getLevel('OFF')]);
assertThat(assert, off).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assert.end();
});
t.end();
});
batch.test('isGreaterThanOrEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isGreaterThanOrEqualTo(['all', 'trace', 'debug']);
assertThat(t, info).isNotGreaterThanOrEqualTo([
'warn',
'ERROR',
'Fatal',
'MARK',
'off',
]);
t.end();
});
batch.test('isLessThanOrEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isNotLessThanOrEqualTo(['all', 'trace', 'debug']);
assertThat(t, info).isLessThanOrEqualTo([
'warn',
'ERROR',
'Fatal',
'MARK',
'off',
]);
t.end();
});
batch.test('isEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isEqualTo(['info', 'INFO', 'iNfO']);
t.end();
});
batch.test('getLevel', (t) => {
t.equal(levels.getLevel('debug'), levels.DEBUG);
t.equal(levels.getLevel('DEBUG'), levels.DEBUG);
t.equal(levels.getLevel('DeBuG'), levels.DEBUG);
t.notOk(levels.getLevel('cheese'));
t.equal(levels.getLevel('cheese', levels.DEBUG), levels.DEBUG);
t.equal(
levels.getLevel({ level: 10000, levelStr: 'DEBUG', colour: 'cyan' }),
levels.DEBUG
);
t.end();
});
batch.end();
});
| const { test } = require('tap');
const levels = require('../../lib/levels');
function assertThat(assert, level) {
function assertForEach(assertion, testFn, otherLevels) {
otherLevels.forEach((other) => {
assertion.call(assert, testFn.call(level, other));
});
}
return {
isLessThanOrEqualTo(lvls) {
assertForEach(assert.ok, level.isLessThanOrEqualTo, lvls);
},
isNotLessThanOrEqualTo(lvls) {
assertForEach(assert.notOk, level.isLessThanOrEqualTo, lvls);
},
isGreaterThanOrEqualTo(lvls) {
assertForEach(assert.ok, level.isGreaterThanOrEqualTo, lvls);
},
isNotGreaterThanOrEqualTo(lvls) {
assertForEach(assert.notOk, level.isGreaterThanOrEqualTo, lvls);
},
isEqualTo(lvls) {
assertForEach(assert.ok, level.isEqualTo, lvls);
},
isNotEqualTo(lvls) {
assertForEach(assert.notOk, level.isEqualTo, lvls);
},
};
}
test('levels', (batch) => {
batch.test('values', (t) => {
t.test('should define some levels', (assert) => {
assert.ok(levels.ALL);
assert.ok(levels.TRACE);
assert.ok(levels.DEBUG);
assert.ok(levels.INFO);
assert.ok(levels.WARN);
assert.ok(levels.ERROR);
assert.ok(levels.FATAL);
assert.ok(levels.MARK);
assert.ok(levels.OFF);
assert.end();
});
t.test('ALL', (assert) => {
const all = levels.ALL;
assertThat(assert, all).isLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, all).isNotGreaterThanOrEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, all).isEqualTo([levels.getLevel('ALL')]);
assertThat(assert, all).isNotEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('TRACE', (assert) => {
const trace = levels.TRACE;
assertThat(assert, trace).isLessThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, trace).isNotLessThanOrEqualTo([levels.ALL]);
assertThat(assert, trace).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, trace).isNotGreaterThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, trace).isEqualTo([levels.getLevel('TRACE')]);
assertThat(assert, trace).isNotEqualTo([
levels.ALL,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('DEBUG', (assert) => {
const debug = levels.DEBUG;
assertThat(assert, debug).isLessThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, debug).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, debug).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
]);
assertThat(assert, debug).isNotGreaterThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, debug).isEqualTo([levels.getLevel('DEBUG')]);
assertThat(assert, debug).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('INFO', (assert) => {
const info = levels.INFO;
assertThat(assert, info).isLessThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, info).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
]);
assertThat(assert, info).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
]);
assertThat(assert, info).isNotGreaterThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, info).isEqualTo([levels.getLevel('INFO')]);
assertThat(assert, info).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('WARN', (assert) => {
const warn = levels.WARN;
assertThat(assert, warn).isLessThanOrEqualTo([
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, warn).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
]);
assertThat(assert, warn).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
]);
assertThat(assert, warn).isNotGreaterThanOrEqualTo([
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, warn).isEqualTo([levels.getLevel('WARN')]);
assertThat(assert, warn).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.ERROR,
levels.FATAL,
levels.OFF,
]);
assert.end();
});
t.test('ERROR', (assert) => {
const error = levels.ERROR;
assertThat(assert, error).isLessThanOrEqualTo([
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, error).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
]);
assertThat(assert, error).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
]);
assertThat(assert, error).isNotGreaterThanOrEqualTo([
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assertThat(assert, error).isEqualTo([levels.getLevel('ERROR')]);
assertThat(assert, error).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.FATAL,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('FATAL', (assert) => {
const fatal = levels.FATAL;
assertThat(assert, fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
assertThat(assert, fatal).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
]);
assertThat(assert, fatal).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
]);
assertThat(assert, fatal).isNotGreaterThanOrEqualTo([
levels.MARK,
levels.OFF,
]);
assertThat(assert, fatal).isEqualTo([levels.getLevel('FATAL')]);
assertThat(assert, fatal).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.MARK,
levels.OFF,
]);
assert.end();
});
t.test('MARK', (assert) => {
const mark = levels.MARK;
assertThat(assert, mark).isLessThanOrEqualTo([levels.OFF]);
assertThat(assert, mark).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.FATAL,
levels.ERROR,
]);
assertThat(assert, mark).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
]);
assertThat(assert, mark).isNotGreaterThanOrEqualTo([levels.OFF]);
assertThat(assert, mark).isEqualTo([levels.getLevel('MARK')]);
assertThat(assert, mark).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF,
]);
assert.end();
});
t.test('OFF', (assert) => {
const off = levels.OFF;
assertThat(assert, off).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assertThat(assert, off).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assertThat(assert, off).isEqualTo([levels.getLevel('OFF')]);
assertThat(assert, off).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
]);
assert.end();
});
t.end();
});
batch.test('isGreaterThanOrEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isGreaterThanOrEqualTo(['all', 'trace', 'debug']);
assertThat(t, info).isNotGreaterThanOrEqualTo([
'warn',
'ERROR',
'Fatal',
'MARK',
'off',
]);
t.end();
});
batch.test('isLessThanOrEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isNotLessThanOrEqualTo(['all', 'trace', 'debug']);
assertThat(t, info).isLessThanOrEqualTo([
'warn',
'ERROR',
'Fatal',
'MARK',
'off',
]);
t.end();
});
batch.test('isEqualTo', (t) => {
const info = levels.INFO;
assertThat(t, info).isEqualTo(['info', 'INFO', 'iNfO']);
t.end();
});
batch.test('getLevel', (t) => {
t.equal(levels.getLevel('debug'), levels.DEBUG);
t.equal(levels.getLevel('DEBUG'), levels.DEBUG);
t.equal(levels.getLevel('DeBuG'), levels.DEBUG);
t.notOk(levels.getLevel('cheese'));
t.equal(levels.getLevel('cheese', levels.DEBUG), levels.DEBUG);
t.equal(
levels.getLevel({ level: 10000, levelStr: 'DEBUG', colour: 'cyan' }),
levels.DEBUG
);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./lib/LoggingEvent.js | const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
/**
* Models a logging event.
* @constructor
* @param {string} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @author Seth Chisamore
*/
constructor(categoryName, level, data, context, location) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
this.pid = process.pid;
if (location) {
this.fileName = location.fileName;
this.lineNumber = location.lineNumber;
this.columnNumber = location.columnNumber;
this.callStack = location.callStack;
this.className = location.className;
this.functionName = location.functionName;
this.functionAlias = location.functionAlias;
this.callerName = location.callerName;
}
}
serialise() {
return flatted.stringify(this, (key, value) => {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
// duck-typing for Error object
if (value && value.message && value.stack) {
// eslint-disable-next-line prefer-object-spread
value = Object.assign(
{ message: value.message, stack: value.stack },
value
);
}
// JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
// The following allows us to serialize to NaN, Infinity and -Infinity correctly.
else if (
typeof value === 'number' &&
(Number.isNaN(value) || !Number.isFinite(value))
) {
value = value.toString();
}
// JSON.stringify([undefined]) returns [null].
// The following allows us to serialize to undefined correctly.
else if (typeof value === 'undefined') {
value = typeof value;
}
return value;
});
}
static deserialise(serialised) {
let event;
try {
const rehydratedEvent = flatted.parse(serialised, (key, value) => {
if (value && value.message && value.stack) {
const fakeError = new Error(value);
Object.keys(value).forEach((k) => {
fakeError[k] = value[k];
});
value = fakeError;
}
return value;
});
rehydratedEvent.location = {
fileName: rehydratedEvent.fileName,
lineNumber: rehydratedEvent.lineNumber,
columnNumber: rehydratedEvent.columnNumber,
callStack: rehydratedEvent.callStack,
className: rehydratedEvent.className,
functionName: rehydratedEvent.functionName,
functionAlias: rehydratedEvent.functionAlias,
callerName: rehydratedEvent.callerName,
};
event = new LoggingEvent(
rehydratedEvent.categoryName,
levels.getLevel(rehydratedEvent.level.levelStr),
rehydratedEvent.data,
rehydratedEvent.context,
rehydratedEvent.location
);
event.startTime = new Date(rehydratedEvent.startTime);
event.pid = rehydratedEvent.pid;
event.cluster = rehydratedEvent.cluster;
} catch (e) {
event = new LoggingEvent('log4js', levels.ERROR, [
'Unable to parse log:',
serialised,
'because: ',
e,
]);
}
return event;
}
}
module.exports = LoggingEvent;
| const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
/**
* Models a logging event.
* @constructor
* @param {string} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @author Seth Chisamore
*/
constructor(categoryName, level, data, context, location) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
this.pid = process.pid;
if (location) {
this.fileName = location.fileName;
this.lineNumber = location.lineNumber;
this.columnNumber = location.columnNumber;
this.callStack = location.callStack;
this.className = location.className;
this.functionName = location.functionName;
this.functionAlias = location.functionAlias;
this.callerName = location.callerName;
}
}
serialise() {
return flatted.stringify(this, (key, value) => {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
// duck-typing for Error object
if (value && value.message && value.stack) {
// eslint-disable-next-line prefer-object-spread
value = Object.assign(
{ message: value.message, stack: value.stack },
value
);
}
// JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
// The following allows us to serialize to NaN, Infinity and -Infinity correctly.
else if (
typeof value === 'number' &&
(Number.isNaN(value) || !Number.isFinite(value))
) {
value = value.toString();
}
// JSON.stringify([undefined]) returns [null].
// The following allows us to serialize to undefined correctly.
else if (typeof value === 'undefined') {
value = typeof value;
}
return value;
});
}
static deserialise(serialised) {
let event;
try {
const rehydratedEvent = flatted.parse(serialised, (key, value) => {
if (value && value.message && value.stack) {
const fakeError = new Error(value);
Object.keys(value).forEach((k) => {
fakeError[k] = value[k];
});
value = fakeError;
}
return value;
});
rehydratedEvent.location = {
fileName: rehydratedEvent.fileName,
lineNumber: rehydratedEvent.lineNumber,
columnNumber: rehydratedEvent.columnNumber,
callStack: rehydratedEvent.callStack,
className: rehydratedEvent.className,
functionName: rehydratedEvent.functionName,
functionAlias: rehydratedEvent.functionAlias,
callerName: rehydratedEvent.callerName,
};
event = new LoggingEvent(
rehydratedEvent.categoryName,
levels.getLevel(rehydratedEvent.level.levelStr),
rehydratedEvent.data,
rehydratedEvent.context,
rehydratedEvent.location
);
event.startTime = new Date(rehydratedEvent.startTime);
event.pid = rehydratedEvent.pid;
event.cluster = rehydratedEvent.cluster;
} catch (e) {
event = new LoggingEvent('log4js', levels.ERROR, [
'Unable to parse log:',
serialised,
'because: ',
e,
]);
}
return event;
}
}
module.exports = LoggingEvent;
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./lib/clustering.js | const debug = require('debug')('log4js:clustering');
const LoggingEvent = require('./LoggingEvent');
const configuration = require('./configuration');
let disabled = false;
let cluster = null;
try {
// eslint-disable-next-line global-require
cluster = require('cluster');
} catch (e) {
debug('cluster module not present');
disabled = true;
}
const listeners = [];
let pm2 = false;
let pm2InstanceVar = 'NODE_APP_INSTANCE';
const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0';
const isMaster = () =>
disabled || (cluster && cluster.isMaster) || isPM2Master();
const sendToListeners = (logEvent) => {
listeners.forEach((l) => l(logEvent));
};
// in a multi-process node environment, worker loggers will use
// process.send
const receiver = (worker, message) => {
// prior to node v6, the worker parameter was not passed (args were message, handle)
debug('cluster message received from worker ', worker, ': ', message);
if (worker.topic && worker.data) {
message = worker;
worker = undefined;
}
if (message && message.topic && message.topic === 'log4js:message') {
debug('received message: ', message.data);
const logEvent = LoggingEvent.deserialise(message.data);
sendToListeners(logEvent);
}
};
if (!disabled) {
configuration.addListener((config) => {
// clear out the listeners, because configure has been called.
listeners.length = 0;
({
pm2,
disableClustering: disabled,
pm2InstanceVar = 'NODE_APP_INSTANCE',
} = config);
debug(`clustering disabled ? ${disabled}`);
debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`);
debug(`pm2 enabled ? ${pm2}`);
debug(`pm2InstanceVar = ${pm2InstanceVar}`);
debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`);
// just in case configure is called after shutdown
if (pm2) {
process.removeListener('message', receiver);
}
if (cluster && cluster.removeListener) {
cluster.removeListener('message', receiver);
}
if (disabled || config.disableClustering) {
debug('Not listening for cluster messages, because clustering disabled.');
} else if (isPM2Master()) {
// PM2 cluster support
// PM2 runs everything as workers - install pm2-intercom for this to work.
// we only want one of the app instances to write logs
debug('listening for PM2 broadcast messages');
process.on('message', receiver);
} else if (cluster && cluster.isMaster) {
debug('listening for cluster messages');
cluster.on('message', receiver);
} else {
debug('not listening for messages, because we are not a master process');
}
});
}
module.exports = {
onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster),
isMaster,
send: (msg) => {
if (isMaster()) {
sendToListeners(msg);
} else {
if (!pm2) {
msg.cluster = {
workerId: cluster.worker.id,
worker: process.pid,
};
}
process.send({ topic: 'log4js:message', data: msg.serialise() });
}
},
onMessage: (listener) => {
listeners.push(listener);
},
};
| const debug = require('debug')('log4js:clustering');
const LoggingEvent = require('./LoggingEvent');
const configuration = require('./configuration');
let disabled = false;
let cluster = null;
try {
// eslint-disable-next-line global-require
cluster = require('cluster');
} catch (e) {
debug('cluster module not present');
disabled = true;
}
const listeners = [];
let pm2 = false;
let pm2InstanceVar = 'NODE_APP_INSTANCE';
const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0';
const isMaster = () =>
disabled || (cluster && cluster.isMaster) || isPM2Master();
const sendToListeners = (logEvent) => {
listeners.forEach((l) => l(logEvent));
};
// in a multi-process node environment, worker loggers will use
// process.send
const receiver = (worker, message) => {
// prior to node v6, the worker parameter was not passed (args were message, handle)
debug('cluster message received from worker ', worker, ': ', message);
if (worker.topic && worker.data) {
message = worker;
worker = undefined;
}
if (message && message.topic && message.topic === 'log4js:message') {
debug('received message: ', message.data);
const logEvent = LoggingEvent.deserialise(message.data);
sendToListeners(logEvent);
}
};
if (!disabled) {
configuration.addListener((config) => {
// clear out the listeners, because configure has been called.
listeners.length = 0;
({
pm2,
disableClustering: disabled,
pm2InstanceVar = 'NODE_APP_INSTANCE',
} = config);
debug(`clustering disabled ? ${disabled}`);
debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`);
debug(`pm2 enabled ? ${pm2}`);
debug(`pm2InstanceVar = ${pm2InstanceVar}`);
debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`);
// just in case configure is called after shutdown
if (pm2) {
process.removeListener('message', receiver);
}
if (cluster && cluster.removeListener) {
cluster.removeListener('message', receiver);
}
if (disabled || config.disableClustering) {
debug('Not listening for cluster messages, because clustering disabled.');
} else if (isPM2Master()) {
// PM2 cluster support
// PM2 runs everything as workers - install pm2-intercom for this to work.
// we only want one of the app instances to write logs
debug('listening for PM2 broadcast messages');
process.on('message', receiver);
} else if (cluster && cluster.isMaster) {
debug('listening for cluster messages');
cluster.on('message', receiver);
} else {
debug('not listening for messages, because we are not a master process');
}
});
}
module.exports = {
onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster),
isMaster,
send: (msg) => {
if (isMaster()) {
sendToListeners(msg);
} else {
if (!pm2) {
msg.cluster = {
workerId: cluster.worker.id,
worker: process.pid,
};
}
process.send({ topic: 'log4js:message', data: msg.serialise() });
}
},
onMessage: (listener) => {
listeners.push(listener);
},
};
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/no-cluster-test.js | const { test } = require('tap');
const proxyquire = require('proxyquire');
test('clustering is disabled if cluster is not present', (t) => {
const log4js = proxyquire('../../lib/log4js', { cluster: null });
const recorder = require('../../lib/appenders/recording');
log4js.configure({
appenders: { vcr: { type: 'recording' } },
categories: { default: { appenders: ['vcr'], level: 'debug' } },
});
log4js.getLogger().info('it should still work');
const events = recorder.replay();
t.equal(events[0].data[0], 'it should still work');
t.end();
});
| const { test } = require('tap');
const proxyquire = require('proxyquire');
test('clustering is disabled if cluster is not present', (t) => {
const log4js = proxyquire('../../lib/log4js', { cluster: null });
const recorder = require('../../lib/appenders/recording');
log4js.configure({
appenders: { vcr: { type: 'recording' } },
categories: { default: { appenders: ['vcr'], level: 'debug' } },
});
log4js.getLogger().info('it should still work');
const events = recorder.replay();
t.equal(events[0].data[0], 'it should still work');
t.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/logging-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function(cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function() {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function(evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const util = require('util');
const recording = require('../../lib/appenders/recording');
test('log4js', (batch) => {
batch.test(
'shutdown should return appenders and categories back to initial state',
(t) => {
const stringifyMap = (map) => JSON.stringify(Array.from(map));
const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
const log4js = require('../../lib/log4js');
const appenders = require('../../lib/appenders');
const categories = require('../../lib/categories');
const initialAppenders = deepCopyMap(appenders);
const initialCategories = deepCopyMap(categories);
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const configuredAppenders = deepCopyMap(appenders);
const configuredCategories = deepCopyMap(categories);
t.not(
stringifyMap(configuredAppenders),
stringifyMap(initialAppenders),
'appenders should be different from initial state'
);
t.not(
stringifyMap(configuredCategories),
stringifyMap(initialCategories),
'categories should be different from initial state'
);
log4js.shutdown(() => {
const finalAppenders = deepCopyMap(appenders);
const finalCategories = deepCopyMap(categories);
t.equal(
stringifyMap(finalAppenders),
stringifyMap(initialAppenders),
'appenders should revert back to initial state'
);
t.equal(
stringifyMap(finalCategories),
stringifyMap(initialCategories),
'categories should revert back to initial state'
);
t.end();
});
}
);
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('tests');
t.test('should take a category and return a logger', (assert) => {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), 'DEBUG');
assert.type(logger.debug, 'function');
assert.type(logger.info, 'function');
assert.type(logger.warn, 'function');
assert.type(logger.error, 'function');
assert.type(logger.fatal, 'function');
assert.end();
});
t.test('log events', (assert) => {
recording.reset();
logger.debug('Debug event');
logger.trace('Trace event 1');
logger.trace('Trace event 2');
logger.warn('Warning event');
logger.error('Aargh!', new Error('Pants are on fire!'));
logger.error('Simulated CouchDB problem', {
err: 127,
cause: 'incendiary underwear',
});
const events = recording.replay();
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.type(events[0].startTime, 'Date');
assert.equal(events.length, 4, 'should not emit events of a lower level');
assert.equal(events[1].level.toString(), 'WARN');
assert.type(
events[2].data[1],
'Error',
'should include the error if passed in'
);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
assert.end();
});
t.end();
});
batch.test('when shutdown is called', (t) => {
const events = {
shutdownCalled: [],
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/file': {
name: 'file',
configure() {
function thing(evt) {
events.event = evt;
return null;
}
thing.shutdown = function(cb) {
events.shutdownCalled.push(true);
cb();
};
return thing;
},
},
},
});
const config = {
appenders: {
file: {
type: 'file',
filename: 'cheesy-wotsits.log',
maxLogSize: 1024,
backups: 3,
},
alsoFile: {
type: 'file',
},
},
categories: {
default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
},
};
log4js.configure(config);
const logger = log4js.getLogger();
log4js.shutdown(() => {
t.equal(
events.shutdownCalled.length,
2,
'should invoke appender shutdowns'
);
logger.info('this should not go to the appenders');
logger.log('info', 'this should not go to the appenders');
logger._log(require('../../lib/levels').INFO, [
'this should not go to the appenders',
]);
t.notOk(events.event);
t.end();
});
});
batch.test('configuration when passed as filename', (t) => {
let appenderConfig;
let configFilename;
const log4js = sandbox.require('../../lib/log4js', {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log',
},
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } },
});
},
readdirSync() {
return ['file'];
},
},
'./file': {
configure(configuration) {
appenderConfig = configuration;
return function() {};
},
},
},
});
log4js.configure('/path/to/cheese.json');
t.equal(
configFilename,
'/path/to/cheese.json',
'should read the config from a file'
);
t.equal(
appenderConfig.filename,
'whatever.log',
'should pass config to appender'
);
t.end();
});
batch.test('with configure not called', (t) => {
const fakeStdoutAppender = {
configure() {
this.required = true;
return function(evt) {
fakeStdoutAppender.evt = evt;
};
},
};
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/stdout': fakeStdoutAppender,
},
});
const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
t.end();
});
batch.test('with configure called with empty values', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const log4js = require('../../lib/log4js');
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});
t.end();
});
batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
});
recording.reset();
const secondLog4js = require('../../lib/log4js');
secondLog4js
.getLogger()
.info('This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
'This should go to the appender defined in firstLog4js'
);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/dateFileAppender-test.js | /* eslint max-classes-per-file: ["error", 3] */
const { test } = require('tap');
const path = require('path');
const fs = require('fs');
const EOL = require('os').EOL || '\n';
const format = require('date-format');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
const osDelay = process.platform === 'win32' ? 400 : 200;
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test('../../lib/appenders/dateFile', (batch) => {
batch.test('with default settings', (t) => {
const testFile = path.join(__dirname, 'date-appender-default.log');
log4js.configure({
appenders: { date: { type: 'dateFile', filename: testFile } },
categories: { default: { appenders: ['date'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('default-settings');
logger.info('This should be in the file.');
t.teardown(() => {
removeFile('date-appender-default.log');
});
setTimeout(() => {
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, 'This should be in the file');
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, osDelay);
});
batch.test('configure with dateFileAppender', (t) => {
log4js.configure({
appenders: {
date: {
type: 'dateFile',
filename: 'test/tap/date-file-test.log',
pattern: '-yyyy-MM-dd',
layout: { type: 'messagePassThrough' },
},
},
categories: { default: { appenders: ['date'], level: 'WARN' } },
});
const logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, 'date-file-test.log'),
'utf8',
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf('this should not be written to the file'),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile('date-file-test.log');
});
});
batch.test('configure with options.alwaysIncludePattern', (t) => {
const options = {
appenders: {
date: {
category: 'tests',
type: 'dateFile',
filename: 'test/tap/date-file-test',
pattern: 'yyyy-MM-dd.log',
alwaysIncludePattern: true,
layout: {
type: 'messagePassThrough',
},
},
},
categories: { default: { appenders: ['date'], level: 'debug' } },
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(__dirname, testFile);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, 'utf8');
log4js.configure(options);
const logger = log4js.getLogger('tests');
logger.warn('this should be written to the file with the appended date');
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, 'utf8', (err, contents) => {
t.match(
contents,
'this is existing data',
'should not overwrite the file on open (issue #132)'
);
t.match(
contents,
'this should be written to the file with the appended date'
);
t.end();
});
});
});
batch.test('should flush logs on shutdown', (t) => {
const testFile = path.join(__dirname, 'date-appender-flush.log');
log4js.configure({
appenders: { test: { type: 'dateFile', filename: testFile } },
categories: { default: { appenders: ['test'], level: 'trace' } },
});
const logger = log4js.getLogger('default-settings');
logger.info('1');
logger.info('2');
logger.info('3');
t.teardown(() => {
removeFile('date-appender-flush.log');
});
log4js.shutdown(() => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test('should map maxLogSize to maxSize', (t) => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() {} // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
'../../lib/appenders/dateFile',
{
requires: { streamroller: fakeStreamroller },
}
);
dateFileAppenderModule.configure(
{
filename: 'cheese.log',
pattern: 'yyyy',
maxLogSize: 100,
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test('handling of writer.writable', (t) => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
// eslint-disable-next-line class-methods-use-this
on() {}
// eslint-disable-next-line class-methods-use-this
get writable() {
return writable;
}
};
const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
requires: {
streamroller: {
DateRollingFileStream,
},
},
});
const appender = dateFileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{
basicLayout(loggingEvent) {
return loggingEvent.data;
},
}
);
t.test('should log when writer.writable=true', (assert) => {
writable = true;
appender({ data: 'something to log' });
assert.ok(output.length, 1);
assert.match(output[output.length - 1], 'something to log');
assert.end();
});
t.test('should not log when writer.writable=false', (assert) => {
writable = false;
appender({ data: 'this should not be logged' });
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], 'this should not be logged');
assert.end();
});
t.end();
});
batch.test('when underlying stream errors', (t) => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === 'error') {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
globals: {
console: {
error(...args) {
consoleArgs = args;
},
},
},
requires: {
streamroller: {
DateRollingFileStream,
},
},
});
dateFileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: 'aargh' });
t.test('should log the error to console.error', (assert) => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
'log4js.dateFileAppender - Writing to file %s, error happened '
);
assert.equal(consoleArgs[1], 'test1.log');
assert.equal(consoleArgs[2].error, 'aargh');
assert.end();
});
t.end();
});
batch.end();
});
| /* eslint max-classes-per-file: ["error", 3] */
const { test } = require('tap');
const path = require('path');
const fs = require('fs');
const EOL = require('os').EOL || '\n';
const format = require('date-format');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
const osDelay = process.platform === 'win32' ? 400 : 200;
function removeFile(filename) {
try {
fs.unlinkSync(path.join(__dirname, filename));
} catch (e) {
// doesn't matter
}
}
test('../../lib/appenders/dateFile', (batch) => {
batch.test('with default settings', (t) => {
const testFile = path.join(__dirname, 'date-appender-default.log');
log4js.configure({
appenders: { date: { type: 'dateFile', filename: testFile } },
categories: { default: { appenders: ['date'], level: 'DEBUG' } },
});
const logger = log4js.getLogger('default-settings');
logger.info('This should be in the file.');
t.teardown(() => {
removeFile('date-appender-default.log');
});
setTimeout(() => {
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, 'This should be in the file');
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
}, osDelay);
});
batch.test('configure with dateFileAppender', (t) => {
log4js.configure({
appenders: {
date: {
type: 'dateFile',
filename: 'test/tap/date-file-test.log',
pattern: '-yyyy-MM-dd',
layout: { type: 'messagePassThrough' },
},
},
categories: { default: { appenders: ['date'], level: 'WARN' } },
});
const logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
log4js.shutdown(() => {
fs.readFile(
path.join(__dirname, 'date-file-test.log'),
'utf8',
(err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(
contents.indexOf('this should not be written to the file'),
-1
);
t.end();
}
);
});
t.teardown(() => {
removeFile('date-file-test.log');
});
});
batch.test('configure with options.alwaysIncludePattern', (t) => {
const options = {
appenders: {
date: {
category: 'tests',
type: 'dateFile',
filename: 'test/tap/date-file-test',
pattern: 'yyyy-MM-dd.log',
alwaysIncludePattern: true,
layout: {
type: 'messagePassThrough',
},
},
},
categories: { default: { appenders: ['date'], level: 'debug' } },
};
const thisTime = format.asString(
options.appenders.date.pattern,
new Date()
);
const testFile = `date-file-test.${thisTime}`;
const existingFile = path.join(__dirname, testFile);
fs.writeFileSync(existingFile, `this is existing data${EOL}`, 'utf8');
log4js.configure(options);
const logger = log4js.getLogger('tests');
logger.warn('this should be written to the file with the appended date');
t.teardown(() => {
removeFile(testFile);
});
// wait for filesystem to catch up
log4js.shutdown(() => {
fs.readFile(existingFile, 'utf8', (err, contents) => {
t.match(
contents,
'this is existing data',
'should not overwrite the file on open (issue #132)'
);
t.match(
contents,
'this should be written to the file with the appended date'
);
t.end();
});
});
});
batch.test('should flush logs on shutdown', (t) => {
const testFile = path.join(__dirname, 'date-appender-flush.log');
log4js.configure({
appenders: { test: { type: 'dateFile', filename: testFile } },
categories: { default: { appenders: ['test'], level: 'trace' } },
});
const logger = log4js.getLogger('default-settings');
logger.info('1');
logger.info('2');
logger.info('3');
t.teardown(() => {
removeFile('date-appender-flush.log');
});
log4js.shutdown(() => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
// 3 lines of output, plus the trailing newline.
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test('should map maxLogSize to maxSize', (t) => {
const fakeStreamroller = {};
class DateRollingFileStream {
constructor(filename, pattern, options) {
fakeStreamroller.filename = filename;
fakeStreamroller.pattern = pattern;
fakeStreamroller.options = options;
}
on() {} // eslint-disable-line class-methods-use-this
}
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
const dateFileAppenderModule = sandbox.require(
'../../lib/appenders/dateFile',
{
requires: { streamroller: fakeStreamroller },
}
);
dateFileAppenderModule.configure(
{
filename: 'cheese.log',
pattern: 'yyyy',
maxLogSize: 100,
},
{ basicLayout: () => {} }
);
t.equal(fakeStreamroller.options.maxSize, 100);
t.end();
});
batch.test('handling of writer.writable', (t) => {
const output = [];
let writable = true;
const DateRollingFileStream = class {
write(loggingEvent) {
output.push(loggingEvent);
this.written = true;
return true;
}
// eslint-disable-next-line class-methods-use-this
on() {}
// eslint-disable-next-line class-methods-use-this
get writable() {
return writable;
}
};
const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
requires: {
streamroller: {
DateRollingFileStream,
},
},
});
const appender = dateFileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{
basicLayout(loggingEvent) {
return loggingEvent.data;
},
}
);
t.test('should log when writer.writable=true', (assert) => {
writable = true;
appender({ data: 'something to log' });
assert.ok(output.length, 1);
assert.match(output[output.length - 1], 'something to log');
assert.end();
});
t.test('should not log when writer.writable=false', (assert) => {
writable = false;
appender({ data: 'this should not be logged' });
assert.ok(output.length, 1);
assert.notMatch(output[output.length - 1], 'this should not be logged');
assert.end();
});
t.end();
});
batch.test('when underlying stream errors', (t) => {
let consoleArgs;
let errorHandler;
const DateRollingFileStream = class {
end() {
this.ended = true;
}
on(evt, cb) {
if (evt === 'error') {
this.errored = true;
errorHandler = cb;
}
}
write() {
this.written = true;
return true;
}
};
const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
globals: {
console: {
error(...args) {
consoleArgs = args;
},
},
},
requires: {
streamroller: {
DateRollingFileStream,
},
},
});
dateFileAppender.configure(
{ filename: 'test1.log', maxLogSize: 100 },
{ basicLayout() {} }
);
errorHandler({ error: 'aargh' });
t.test('should log the error to console.error', (assert) => {
assert.ok(consoleArgs);
assert.equal(
consoleArgs[0],
'log4js.dateFileAppender - Writing to file %s, error happened '
);
assert.equal(consoleArgs[1], 'test1.log');
assert.equal(consoleArgs[2].error, 'aargh');
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./.git/refs/heads/master | bd457888eb91b9e932fe8f66d720cf2d9d6442f4
| bd457888eb91b9e932fe8f66d720cf2d9d6442f4
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/file.md | # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"file"`
- `filename` - `string` - the path of the file where you want your logs written.
- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
`maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`.
- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in all-the-logs.log");
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "file",
filename: "all-the-logs.log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "file", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"file"`
- `filename` - `string` - the path of the file where you want your logs written.
- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
`maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`.
- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in all-the-logs.log");
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "file",
filename: "all-the-logs.log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "file", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./docs/connect-logger.md | # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function(req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function(req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/layouts-test.js | const { test } = require('tap');
const debug = require('debug');
const os = require('os');
const path = require('path');
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test('log4js layouts', (batch) => {
batch.test('colouredLayout', (t) => {
const layout = require('../../lib/layouts').colouredLayout;
t.test('should apply level colour codes to output', (assert) => {
const output = layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
);
assert.end();
});
t.test(
'should support the console.log format for the message',
(assert) => {
const output = layout({
data: ['thing %d', 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'
);
assert.end();
}
);
t.end();
});
batch.test('messagePassThroughLayout', (t) => {
const layout = require('../../lib/layouts').messagePassThroughLayout;
t.equal(
layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'nonsense',
'should take a logevent and output only the message'
);
t.equal(
layout({
data: ['thing %d', 1, 'cheese'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'thing 1 cheese',
'should support the console.log format for the message'
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'{ thing: 1 }',
'should output the first item even if it is not a string'
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
/at (Test\.batch\.test(\.t)?|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
'regexp did not return a match - should print the stacks of a passed error objects'
);
t.test('with passed augmented errors', (assert) => {
const e = new Error('My Unique Error Message');
e.augmented = 'My Unique attribute value';
e.augObj = { at1: 'at2' };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
'should print the contained error message'
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
'should print error augmented string attributes'
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
'should print error augmented object attributes'
);
assert.end();
});
t.end();
});
batch.test('basicLayout', (t) => {
const layout = require('../../lib/layouts').basicLayout;
const event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'tests',
level: {
toString() {
return 'DEBUG';
},
},
};
t.equal(
layout(event),
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test'
);
t.test(
'should output a stacktrace, message if the event has an error attached',
(assert) => {
let i;
const error = new Error('Some made-up error');
const stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
'should output any extra data in the log event as util.inspect strings',
(assert) => {
event.data = [
'this is a test',
{
name: 'Cheese',
message: 'Gorgonzola smells.',
},
];
const output = layout(event);
assert.equal(
output,
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test('dummyLayout', (t) => {
const layout = require('../../lib/layouts').dummyLayout;
t.test('should output just the first element of the log data', (assert) => {
const event = {
data: ['this is the first value', 'this is not'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
};
assert.equal(layout(event), 'this is the first value');
assert.end();
});
t.end();
});
batch.test('patternLayout', (t) => {
const originalListener = process.listeners('warning')[
process.listeners('warning').length - 1
];
const warningListener = (error) => {
if (error.name === 'DeprecationWarning') {
if (
error.code.startsWith('log4js-node-DEP0003') ||
error.code.startsWith('log4js-node-DEP0004')
) {
return;
}
}
originalListener(error);
};
process.off('warning', originalListener);
process.on('warning', warningListener);
const debugWasEnabled = debug.enabled('log4js:layouts');
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off('warning', warningListener);
process.on('warning', originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: 'testStringToken',
testFunction() {
return 'testFunctionToken';
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
},
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = path.normalize('/log4js-node/test/tap/layouts-test.js');
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const event = {
data: ['this is a test'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber,
className,
functionName,
functionAlias,
callerName,
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require('../../lib/layouts').patternLayout;
t.test(
'should default to "time logLevel loggerName - message"',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
}
);
t.test('%r should output time only', (assert) => {
testPattern(assert, layout, event, tokens, '%r', '14:18:30');
assert.end();
});
t.test('%p should output the log level', (assert) => {
testPattern(assert, layout, event, tokens, '%p', 'DEBUG');
assert.end();
});
t.test('%c should output the log category', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%c',
'multiple.levels.of.tests'
);
assert.end();
});
t.test('%m should output the log data', (assert) => {
testPattern(assert, layout, event, tokens, '%m', 'this is a test');
assert.end();
});
t.test('%n should output a new line', (assert) => {
testPattern(assert, layout, event, tokens, '%n', EOL);
assert.end();
});
t.test('%h should output hostname', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%h',
os.hostname().toString()
);
assert.end();
});
t.test('%z should output pid', (assert) => {
testPattern(assert, layout, event, tokens, '%z', process.pid.toString());
assert.end();
});
t.test('%z should pick up pid from log event if present', (assert) => {
event.pid = '1234';
testPattern(assert, layout, event, tokens, '%z', '1234');
delete event.pid;
assert.end();
});
t.test('%y should output pid (was cluster info)', (assert) => {
testPattern(assert, layout, event, tokens, '%y', process.pid.toString());
assert.end();
});
t.test(
'%c should handle category names like java-style package names',
(assert) => {
testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
testPattern(
assert,
layout,
event,
tokens,
'%c{4}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{5}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{99}',
'multiple.levels.of.tests'
);
assert.end();
}
);
t.test('%d should output the date in ISO8601 format', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d',
'2010-12-05T14:18:30.045'
);
assert.end();
});
t.test('%d should allow for format specification', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601}',
'2010-12-05T14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601_WITH_TZ_OFFSET}',
'2010-12-05T14:18:30.045+10:00'
);
const DEP0003 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0003') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTE}', // deprecated
'14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0003') > -1).length,
DEP0003 + 1,
'deprecation log4js-node-DEP0003 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTETIME}',
'14:18:30.045'
);
const DEP0004 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0004') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{DATE}', // deprecated
'05 12 2010 14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0004') > -1).length,
DEP0004 + 1,
'deprecation log4js-node-DEP0004 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{DATETIME}',
'05 12 2010 14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yy MM dd hh mm ss}',
'10 12 05 14 18 30'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd}',
'2010 12 05'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd hh mm ss SSS}',
'2010 12 05 14 18 30 045'
);
assert.end();
});
t.test('%% should output %', (assert) => {
testPattern(assert, layout, event, tokens, '%%', '%');
assert.end();
});
t.test('%f should output filename', (assert) => {
testPattern(assert, layout, event, tokens, '%f', fileName);
assert.end();
});
t.test('%f should handle filename depth', (assert) => {
testPattern(assert, layout, event, tokens, '%f{1}', 'layouts-test.js');
testPattern(
assert,
layout,
event,
tokens,
'%f{2}',
path.join('tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{3}',
path.join('test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{4}',
path.join('log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{5}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{99}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
assert.end();
});
t.test('%f should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%.5f', fileName.slice(0, 5));
testPattern(
assert,
layout,
event,
tokens,
'%20f{1}',
' layouts-test.js'
);
testPattern(
assert,
layout,
event,
tokens,
'%30.30f{2}',
` ${path.join('tap', 'layouts-test.js')}`
);
testPattern(assert, layout, event, tokens, '%10.-5f{1}', ' st.js');
assert.end();
});
t.test('%l should output line number', (assert) => {
testPattern(assert, layout, event, tokens, '%l', lineNumber.toString());
assert.end();
});
t.test('%l should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10l', ' 1');
testPattern(assert, layout, event, tokens, '%.5l', '1');
testPattern(assert, layout, event, tokens, '%.-5l', '1');
testPattern(assert, layout, event, tokens, '%-5l', '1 ');
assert.end();
});
t.test('%o should output column postion', (assert) => {
testPattern(assert, layout, event, tokens, '%o', columnNumber.toString());
assert.end();
});
t.test('%o should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10o', ' 14');
testPattern(assert, layout, event, tokens, '%.5o', '14');
testPattern(assert, layout, event, tokens, '%.1o', '1');
testPattern(assert, layout, event, tokens, '%.-1o', '4');
testPattern(assert, layout, event, tokens, '%-5o', '14 ');
assert.end();
});
t.test('%s should output stack', (assert) => {
testPattern(assert, layout, event, tokens, '%s', callStack);
assert.end();
});
t.test(
'%f should output empty string when fileName not exist',
(assert) => {
delete event.fileName;
testPattern(assert, layout, event, tokens, '%f', '');
assert.end();
}
);
t.test(
'%l should output empty string when lineNumber not exist',
(assert) => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, '%l', '');
assert.end();
}
);
t.test(
'%o should output empty string when columnNumber not exist',
(assert) => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, '%o', '');
assert.end();
}
);
t.test(
'%s should output empty string when callStack not exist',
(assert) => {
delete event.callStack;
testPattern(assert, layout, event, tokens, '%s', '');
assert.end();
}
);
t.test('should output anything not preceded by % as literal', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'blah blah blah',
'blah blah blah'
);
assert.end();
});
t.test(
'should output the original string if no replacer matches the token',
(assert) => {
testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
assert.end();
}
);
t.test('should handle complicated patterns', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n',
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test('should truncate fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%.4m', 'this');
testPattern(assert, layout, event, tokens, '%.7m', 'this is');
testPattern(assert, layout, event, tokens, '%.9m', 'this is a');
testPattern(assert, layout, event, tokens, '%.14m', 'this is a test');
testPattern(
assert,
layout,
event,
tokens,
'%.2919102m',
'this is a test'
);
testPattern(assert, layout, event, tokens, '%.-4m', 'test');
assert.end();
});
t.test('should pad fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%10p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%8p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%6p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-6p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-8p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-10p', 'DEBUG ');
assert.end();
});
t.test('%[%r%] should output colored time', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%[%r%]',
'\x1B[36m14:18:30\x1B[39m'
);
assert.end();
});
t.test(
'%x{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%x{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%x{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%x{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%x should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, tokens, '%x', 'null');
assert.end();
});
t.test(
'%X{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%X{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%X{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, {}, '%X{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%X{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%X should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, {}, '%X', 'null');
assert.end();
});
t.test('%M should output function name', (assert) => {
testPattern(assert, layout, event, tokens, '%M', functionName);
assert.end();
});
t.test(
'%M should output empty string when functionName not exist',
(assert) => {
delete event.functionName;
testPattern(assert, layout, event, tokens, '%M', '');
assert.end();
}
);
t.test('%C should output class name', (assert) => {
testPattern(assert, layout, event, tokens, '%C', className);
assert.end();
});
t.test(
'%C should output empty string when className not exist',
(assert) => {
delete event.className;
testPattern(assert, layout, event, tokens, '%C', '');
assert.end();
}
);
t.test('%A should output function alias', (assert) => {
testPattern(assert, layout, event, tokens, '%A', functionAlias);
assert.end();
});
t.test(
'%A should output empty string when functionAlias not exist',
(assert) => {
delete event.functionAlias;
testPattern(assert, layout, event, tokens, '%A', '');
assert.end();
}
);
t.test('%F should output fully qualified caller name', (assert) => {
testPattern(assert, layout, event, tokens, '%F', callerName);
assert.end();
});
t.test(
'%F should output empty string when callerName not exist',
(assert) => {
delete event.callerName;
testPattern(assert, layout, event, tokens, '%F', '');
assert.end();
}
);
t.end();
});
batch.test('layout makers', (t) => {
const layouts = require('../../lib/layouts');
t.test('should have a maker for each layout', (assert) => {
assert.ok(layouts.layout('messagePassThrough'));
assert.ok(layouts.layout('basic'));
assert.ok(layouts.layout('colored'));
assert.ok(layouts.layout('coloured'));
assert.ok(layouts.layout('pattern'));
assert.ok(layouts.layout('dummy'));
assert.end();
});
t.test(
'layout pattern maker should pass pattern and tokens to layout from config',
(assert) => {
let layout = layouts.layout('pattern', { pattern: '%%' });
assert.equal(layout({}), '%');
layout = layouts.layout('pattern', {
pattern: '%x{testStringToken}',
tokens: { testStringToken: 'cheese' },
});
assert.equal(layout({}), 'cheese');
assert.end();
}
);
t.end();
});
batch.test('add layout', (t) => {
const layouts = require('../../lib/layouts');
t.test('should be able to add a layout', (assert) => {
layouts.addLayout('test_layout', (config) => {
assert.equal(config, 'test_config');
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout('test_layout', 'test_config');
assert.ok(serializer);
assert.equal(serializer({ data: 'INPUT' }), 'TEST LAYOUT >INPUT');
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require('tap');
const debug = require('debug');
const os = require('os');
const path = require('path');
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test('log4js layouts', (batch) => {
batch.test('colouredLayout', (t) => {
const layout = require('../../lib/layouts').colouredLayout;
t.test('should apply level colour codes to output', (assert) => {
const output = layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
);
assert.end();
});
t.test(
'should support the console.log format for the message',
(assert) => {
const output = layout({
data: ['thing %d', 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'
);
assert.end();
}
);
t.end();
});
batch.test('messagePassThroughLayout', (t) => {
const layout = require('../../lib/layouts').messagePassThroughLayout;
t.equal(
layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'nonsense',
'should take a logevent and output only the message'
);
t.equal(
layout({
data: ['thing %d', 1, 'cheese'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'thing 1 cheese',
'should support the console.log format for the message'
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'{ thing: 1 }',
'should output the first item even if it is not a string'
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
/at (Test\.batch\.test(\.t)?|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
'regexp did not return a match - should print the stacks of a passed error objects'
);
t.test('with passed augmented errors', (assert) => {
const e = new Error('My Unique Error Message');
e.augmented = 'My Unique attribute value';
e.augObj = { at1: 'at2' };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
'should print the contained error message'
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
'should print error augmented string attributes'
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
'should print error augmented object attributes'
);
assert.end();
});
t.end();
});
batch.test('basicLayout', (t) => {
const layout = require('../../lib/layouts').basicLayout;
const event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'tests',
level: {
toString() {
return 'DEBUG';
},
},
};
t.equal(
layout(event),
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test'
);
t.test(
'should output a stacktrace, message if the event has an error attached',
(assert) => {
let i;
const error = new Error('Some made-up error');
const stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
'should output any extra data in the log event as util.inspect strings',
(assert) => {
event.data = [
'this is a test',
{
name: 'Cheese',
message: 'Gorgonzola smells.',
},
];
const output = layout(event);
assert.equal(
output,
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test('dummyLayout', (t) => {
const layout = require('../../lib/layouts').dummyLayout;
t.test('should output just the first element of the log data', (assert) => {
const event = {
data: ['this is the first value', 'this is not'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
};
assert.equal(layout(event), 'this is the first value');
assert.end();
});
t.end();
});
batch.test('patternLayout', (t) => {
const originalListener = process.listeners('warning')[
process.listeners('warning').length - 1
];
const warningListener = (error) => {
if (error.name === 'DeprecationWarning') {
if (
error.code.startsWith('log4js-node-DEP0003') ||
error.code.startsWith('log4js-node-DEP0004')
) {
return;
}
}
originalListener(error);
};
process.off('warning', originalListener);
process.on('warning', warningListener);
const debugWasEnabled = debug.enabled('log4js:layouts');
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off('warning', warningListener);
process.on('warning', originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: 'testStringToken',
testFunction() {
return 'testFunctionToken';
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
},
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = path.normalize('/log4js-node/test/tap/layouts-test.js');
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const event = {
data: ['this is a test'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber,
className,
functionName,
functionAlias,
callerName,
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require('../../lib/layouts').patternLayout;
t.test(
'should default to "time logLevel loggerName - message"',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
}
);
t.test('%r should output time only', (assert) => {
testPattern(assert, layout, event, tokens, '%r', '14:18:30');
assert.end();
});
t.test('%p should output the log level', (assert) => {
testPattern(assert, layout, event, tokens, '%p', 'DEBUG');
assert.end();
});
t.test('%c should output the log category', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%c',
'multiple.levels.of.tests'
);
assert.end();
});
t.test('%m should output the log data', (assert) => {
testPattern(assert, layout, event, tokens, '%m', 'this is a test');
assert.end();
});
t.test('%n should output a new line', (assert) => {
testPattern(assert, layout, event, tokens, '%n', EOL);
assert.end();
});
t.test('%h should output hostname', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%h',
os.hostname().toString()
);
assert.end();
});
t.test('%z should output pid', (assert) => {
testPattern(assert, layout, event, tokens, '%z', process.pid.toString());
assert.end();
});
t.test('%z should pick up pid from log event if present', (assert) => {
event.pid = '1234';
testPattern(assert, layout, event, tokens, '%z', '1234');
delete event.pid;
assert.end();
});
t.test('%y should output pid (was cluster info)', (assert) => {
testPattern(assert, layout, event, tokens, '%y', process.pid.toString());
assert.end();
});
t.test(
'%c should handle category names like java-style package names',
(assert) => {
testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
testPattern(
assert,
layout,
event,
tokens,
'%c{4}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{5}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{99}',
'multiple.levels.of.tests'
);
assert.end();
}
);
t.test('%d should output the date in ISO8601 format', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d',
'2010-12-05T14:18:30.045'
);
assert.end();
});
t.test('%d should allow for format specification', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601}',
'2010-12-05T14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601_WITH_TZ_OFFSET}',
'2010-12-05T14:18:30.045+10:00'
);
const DEP0003 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0003') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTE}', // deprecated
'14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0003') > -1).length,
DEP0003 + 1,
'deprecation log4js-node-DEP0003 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTETIME}',
'14:18:30.045'
);
const DEP0004 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0004') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{DATE}', // deprecated
'05 12 2010 14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0004') > -1).length,
DEP0004 + 1,
'deprecation log4js-node-DEP0004 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{DATETIME}',
'05 12 2010 14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yy MM dd hh mm ss}',
'10 12 05 14 18 30'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd}',
'2010 12 05'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd hh mm ss SSS}',
'2010 12 05 14 18 30 045'
);
assert.end();
});
t.test('%% should output %', (assert) => {
testPattern(assert, layout, event, tokens, '%%', '%');
assert.end();
});
t.test('%f should output filename', (assert) => {
testPattern(assert, layout, event, tokens, '%f', fileName);
assert.end();
});
t.test('%f should handle filename depth', (assert) => {
testPattern(assert, layout, event, tokens, '%f{1}', 'layouts-test.js');
testPattern(
assert,
layout,
event,
tokens,
'%f{2}',
path.join('tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{3}',
path.join('test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{4}',
path.join('log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{5}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{99}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
assert.end();
});
t.test('%f should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%.5f', fileName.slice(0, 5));
testPattern(
assert,
layout,
event,
tokens,
'%20f{1}',
' layouts-test.js'
);
testPattern(
assert,
layout,
event,
tokens,
'%30.30f{2}',
` ${path.join('tap', 'layouts-test.js')}`
);
testPattern(assert, layout, event, tokens, '%10.-5f{1}', ' st.js');
assert.end();
});
t.test('%l should output line number', (assert) => {
testPattern(assert, layout, event, tokens, '%l', lineNumber.toString());
assert.end();
});
t.test('%l should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10l', ' 1');
testPattern(assert, layout, event, tokens, '%.5l', '1');
testPattern(assert, layout, event, tokens, '%.-5l', '1');
testPattern(assert, layout, event, tokens, '%-5l', '1 ');
assert.end();
});
t.test('%o should output column postion', (assert) => {
testPattern(assert, layout, event, tokens, '%o', columnNumber.toString());
assert.end();
});
t.test('%o should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10o', ' 14');
testPattern(assert, layout, event, tokens, '%.5o', '14');
testPattern(assert, layout, event, tokens, '%.1o', '1');
testPattern(assert, layout, event, tokens, '%.-1o', '4');
testPattern(assert, layout, event, tokens, '%-5o', '14 ');
assert.end();
});
t.test('%s should output stack', (assert) => {
testPattern(assert, layout, event, tokens, '%s', callStack);
assert.end();
});
t.test(
'%f should output empty string when fileName not exist',
(assert) => {
delete event.fileName;
testPattern(assert, layout, event, tokens, '%f', '');
assert.end();
}
);
t.test(
'%l should output empty string when lineNumber not exist',
(assert) => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, '%l', '');
assert.end();
}
);
t.test(
'%o should output empty string when columnNumber not exist',
(assert) => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, '%o', '');
assert.end();
}
);
t.test(
'%s should output empty string when callStack not exist',
(assert) => {
delete event.callStack;
testPattern(assert, layout, event, tokens, '%s', '');
assert.end();
}
);
t.test('should output anything not preceded by % as literal', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'blah blah blah',
'blah blah blah'
);
assert.end();
});
t.test(
'should output the original string if no replacer matches the token',
(assert) => {
testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
assert.end();
}
);
t.test('should handle complicated patterns', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n',
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test('should truncate fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%.4m', 'this');
testPattern(assert, layout, event, tokens, '%.7m', 'this is');
testPattern(assert, layout, event, tokens, '%.9m', 'this is a');
testPattern(assert, layout, event, tokens, '%.14m', 'this is a test');
testPattern(
assert,
layout,
event,
tokens,
'%.2919102m',
'this is a test'
);
testPattern(assert, layout, event, tokens, '%.-4m', 'test');
assert.end();
});
t.test('should pad fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%10p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%8p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%6p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-6p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-8p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-10p', 'DEBUG ');
assert.end();
});
t.test('%[%r%] should output colored time', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%[%r%]',
'\x1B[36m14:18:30\x1B[39m'
);
assert.end();
});
t.test(
'%x{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%x{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%x{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%x{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%x should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, tokens, '%x', 'null');
assert.end();
});
t.test(
'%X{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%X{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%X{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, {}, '%X{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%X{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%X should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, {}, '%X', 'null');
assert.end();
});
t.test('%M should output function name', (assert) => {
testPattern(assert, layout, event, tokens, '%M', functionName);
assert.end();
});
t.test(
'%M should output empty string when functionName not exist',
(assert) => {
delete event.functionName;
testPattern(assert, layout, event, tokens, '%M', '');
assert.end();
}
);
t.test('%C should output class name', (assert) => {
testPattern(assert, layout, event, tokens, '%C', className);
assert.end();
});
t.test(
'%C should output empty string when className not exist',
(assert) => {
delete event.className;
testPattern(assert, layout, event, tokens, '%C', '');
assert.end();
}
);
t.test('%A should output function alias', (assert) => {
testPattern(assert, layout, event, tokens, '%A', functionAlias);
assert.end();
});
t.test(
'%A should output empty string when functionAlias not exist',
(assert) => {
delete event.functionAlias;
testPattern(assert, layout, event, tokens, '%A', '');
assert.end();
}
);
t.test('%F should output fully qualified caller name', (assert) => {
testPattern(assert, layout, event, tokens, '%F', callerName);
assert.end();
});
t.test(
'%F should output empty string when callerName not exist',
(assert) => {
delete event.callerName;
testPattern(assert, layout, event, tokens, '%F', '');
assert.end();
}
);
t.end();
});
batch.test('layout makers', (t) => {
const layouts = require('../../lib/layouts');
t.test('should have a maker for each layout', (assert) => {
assert.ok(layouts.layout('messagePassThrough'));
assert.ok(layouts.layout('basic'));
assert.ok(layouts.layout('colored'));
assert.ok(layouts.layout('coloured'));
assert.ok(layouts.layout('pattern'));
assert.ok(layouts.layout('dummy'));
assert.end();
});
t.test(
'layout pattern maker should pass pattern and tokens to layout from config',
(assert) => {
let layout = layouts.layout('pattern', { pattern: '%%' });
assert.equal(layout({}), '%');
layout = layouts.layout('pattern', {
pattern: '%x{testStringToken}',
tokens: { testStringToken: 'cheese' },
});
assert.equal(layout({}), 'cheese');
assert.end();
}
);
t.end();
});
batch.test('add layout', (t) => {
const layouts = require('../../lib/layouts');
t.test('should be able to add a layout', (assert) => {
layouts.addLayout('test_layout', (config) => {
assert.equal(config, 'test_config');
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout('test_layout', 'test_config');
assert.ok(serializer);
assert.equal(serializer({ data: 'INPUT' }), 'TEST LAYOUT >INPUT');
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./test/tap/configuration-test.js | const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const realFS = require('fs');
// Path of the fake config file the sandboxed log4js will be asked to load.
const modulePath = 'some/path/to/mylog4js.json';
// Records every path the fake fs.statSync is asked about.
const pathsChecked = [];
let fakeFS = {};
let dependencies;
// Counts how many times the fake config file has been read.
let fileRead;
test('log4js configure', (batch) => {
  // Rebuild the fake `fs` and the sandbox dependency map before each test so
  // every test observes a fresh read counter and config object.
  batch.beforeEach((done) => {
    fileRead = 0;
    fakeFS = {
      realpath: realFS.realpath, // fs-extra looks for this
      ReadStream: realFS.ReadStream, // need to define these, because graceful-fs uses them
      WriteStream: realFS.WriteStream,
      read: realFS.read,
      closeSync: () => {},
      // Configuration object that readFileSync below serves as JSON.
      config: {
        appenders: {
          console: {
            type: 'console',
            layout: { type: 'messagePassThrough' },
          },
        },
        categories: {
          default: {
            appenders: ['console'],
            level: 'INFO',
          },
        },
      },
      readdirSync: (dir) => require('fs').readdirSync(dir),
      // Serves fakeFS.config and verifies log4js requests the expected
      // file with utf8 encoding.
      readFileSync: (file, encoding) => {
        fileRead += 1;
        batch.type(file, 'string');
        batch.equal(file, modulePath);
        batch.equal(encoding, 'utf8');
        return JSON.stringify(fakeFS.config);
      },
      // Pretends only modulePath exists on disk.
      statSync: (path) => {
        pathsChecked.push(path);
        if (path === modulePath) {
          return { mtime: new Date() };
        }
        throw new Error('no such file');
      },
    };
    dependencies = {
      requires: {
        fs: fakeFS,
      },
    };
    if (typeof done === 'function') {
      done();
    }
  });
  batch.test(
    'when configuration file loaded via LOG4JS_CONFIG env variable',
    (t) => {
      process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
      const log4js = sandbox.require('../../lib/log4js', dependencies);
      log4js.getLogger('test-logger');
      t.equal(fileRead, 1, 'should load the specified local config file');
      delete process.env.LOG4JS_CONFIG;
      t.end();
    }
  );
  batch.test(
    'when configuration is set via configure() method call, return the log4js object',
    (t) => {
      const log4js = sandbox
        .require('../../lib/log4js', dependencies)
        .configure(fakeFS.config);
      t.type(
        log4js,
        'object',
        'Configure method call should return the log4js object!'
      );
      const log = log4js.getLogger('daemon');
      t.type(
        log,
        'object',
        'log4js object, returned by configure(...) method should be able to create log object.'
      );
      t.type(log.info, 'function');
      t.end();
    }
  );
  batch.end();
});
| const { test } = require('tap');
const sandbox = require('@log4js-node/sandboxed-module');
const realFS = require('fs');
// Path of the fake config file the sandboxed log4js will be asked to load.
const modulePath = 'some/path/to/mylog4js.json';
// Records every path the fake fs.statSync is asked about.
const pathsChecked = [];
let fakeFS = {};
let dependencies;
// Counts how many times the fake config file has been read.
let fileRead;
test('log4js configure', (batch) => {
  // Rebuild the fake `fs` and the sandbox dependency map before each test so
  // every test observes a fresh read counter and config object.
  batch.beforeEach((done) => {
    fileRead = 0;
    fakeFS = {
      realpath: realFS.realpath, // fs-extra looks for this
      ReadStream: realFS.ReadStream, // need to define these, because graceful-fs uses them
      WriteStream: realFS.WriteStream,
      read: realFS.read,
      closeSync: () => {},
      // Configuration object that readFileSync below serves as JSON.
      config: {
        appenders: {
          console: {
            type: 'console',
            layout: { type: 'messagePassThrough' },
          },
        },
        categories: {
          default: {
            appenders: ['console'],
            level: 'INFO',
          },
        },
      },
      readdirSync: (dir) => require('fs').readdirSync(dir),
      // Serves fakeFS.config and verifies log4js requests the expected
      // file with utf8 encoding.
      readFileSync: (file, encoding) => {
        fileRead += 1;
        batch.type(file, 'string');
        batch.equal(file, modulePath);
        batch.equal(encoding, 'utf8');
        return JSON.stringify(fakeFS.config);
      },
      // Pretends only modulePath exists on disk.
      statSync: (path) => {
        pathsChecked.push(path);
        if (path === modulePath) {
          return { mtime: new Date() };
        }
        throw new Error('no such file');
      },
    };
    dependencies = {
      requires: {
        fs: fakeFS,
      },
    };
    if (typeof done === 'function') {
      done();
    }
  });
  batch.test(
    'when configuration file loaded via LOG4JS_CONFIG env variable',
    (t) => {
      process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
      const log4js = sandbox.require('../../lib/log4js', dependencies);
      log4js.getLogger('test-logger');
      t.equal(fileRead, 1, 'should load the specified local config file');
      delete process.env.LOG4JS_CONFIG;
      t.end();
    }
  );
  batch.test(
    'when configuration is set via configure() method call, return the log4js object',
    (t) => {
      const log4js = sandbox
        .require('../../lib/log4js', dependencies)
        .configure(fakeFS.config);
      t.type(
        log4js,
        'object',
        'Configure method call should return the log4js object!'
      );
      const log = log4js.getLogger('daemon');
      t.type(
        log,
        'object',
        'log4js object, returned by configure(...) method should be able to create log object.'
      );
      t.type(log.info, 'function');
      t.end();
    }
  );
  batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,319 | refactor(#1082): removed return value for `log4js.shutdown()` | lamweili | "2022-09-03T09:10:51Z" | "2022-09-03T09:26:01Z" | 6a6029461111eb878f3110ecd782a09ef16298f7 | 588e793d49ae7c739e930b0c1c1995338aa6e724 | refactor(#1082): removed return value for `log4js.shutdown()`. | ./lib/appenders/fileSync.js | const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
// Ensure `file` exists and is writable: create its parent directories (with
// a fallback for Node < 10.12.0, which lacks mkdirSync's recursive option)
// and append an empty string so permission/flag errors surface early.
function touchFile(file, options) {
  // attempt to create the directory
  const mkdir = (dir) => {
    try {
      return fs.mkdirSync(dir, { recursive: true });
    } catch (e) {
      // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
      // recursive creation of parent first
      if (e.code === 'ENOENT') {
        mkdir(path.dirname(dir));
        return mkdir(dir);
      }
      // throw error for all except EEXIST and EROFS (read-only filesystem)
      if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
        throw e;
      }
      // EEXIST: throw if file and not directory
      // EROFS : throw if directory not found
      else {
        try {
          if (fs.statSync(dir).isDirectory()) {
            return dir;
          }
          throw e;
        } catch (err) {
          // stat failed or `dir` is a plain file: surface the original error
          throw e;
        }
      }
    }
  };
  mkdir(path.dirname(file));
  // try to throw EISDIR, EROFS, EACCES
  fs.appendFileSync(file, '', { mode: options.mode, flag: options.flags });
}
class RollingFileSync {
  /**
   * Synchronous rolling file writer: appends to `filename` and, once the
   * file reaches `maxLogSize` bytes, rotates it through numbered backups
   * (file.1, file.2, ... up to `backups`).
   *
   * @param {string} filename - path of the active log file
   * @param {number} maxLogSize - size in bytes that triggers a roll
   * @param {number} backups - number of rolled files to keep (0 = truncate only)
   * @param {object} options - fs options (mode, flags) used when touching the file
   * @throws {Error} if maxLogSize is negative
   */
  constructor(filename, maxLogSize, backups, options) {
    debug('In RollingFileStream');
    if (maxLogSize < 0) {
      throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
    }
    this.filename = filename;
    this.size = maxLogSize;
    this.backups = backups;
    this.options = options;
    this.currentSize = 0;
    // Determine the starting size; create the file if it does not exist yet.
    function currentFileSize(file) {
      let fileSize = 0;
      try {
        fileSize = fs.statSync(file).size;
      } catch (e) {
        // file does not exist
        touchFile(file, options);
      }
      return fileSize;
    }
    this.currentSize = currentFileSize(this.filename);
  }
  // True once the accumulated byte count has reached the configured maximum.
  shouldRoll() {
    debug(
      'should roll with current size %d, and max size %d',
      this.currentSize,
      this.size
    );
    return this.currentSize >= this.size;
  }
  // Rotate backups: rename file.n -> file.n+1 (highest index first, dropping
  // anything past `backups`), freeing the base file for new writes.
  roll(filename) {
    const that = this;
    const nameMatcher = new RegExp(`^${path.basename(filename)}`);
    function justTheseFiles(item) {
      return nameMatcher.test(item);
    }
    // Numeric suffix of a rolled file ("file.3" -> 3); the base file is 0.
    function index(filename_) {
      return (
        parseInt(filename_.slice(`${path.basename(filename)}.`.length), 10) || 0
      );
    }
    function byIndex(a, b) {
      return index(a) - index(b);
    }
    function increaseFileIndex(fileToRename) {
      const idx = index(fileToRename);
      debug(`Index of ${fileToRename} is ${idx}`);
      if (that.backups === 0) {
        // no backups wanted: just empty the base file
        fs.truncateSync(filename, 0);
      } else if (idx < that.backups) {
        // on windows, you can get a EEXIST error if you rename a file to an existing file
        // so, we'll try to delete the file we're renaming to first
        try {
          // FIX: target the actual filename; previously this used the
          // garbled literal "$(unknown)" instead of the log file path.
          fs.unlinkSync(`${filename}.${idx + 1}`);
        } catch (e) {
          // ignore err: if we could not delete, it's most likely that it doesn't exist
        }
        debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
        fs.renameSync(
          path.join(path.dirname(filename), fileToRename),
          `${filename}.${idx + 1}`
        );
      }
    }
    function renameTheFiles() {
      // roll the backups (rename file.n to file.n+1, where n <= numBackups)
      debug('Renaming the old files');
      const files = fs.readdirSync(path.dirname(filename));
      files
        .filter(justTheseFiles)
        .sort(byIndex)
        .reverse()
        .forEach(increaseFileIndex);
    }
    debug('Rolling, rolling, rolling');
    renameTheFiles();
  }
  /**
   * Append a chunk, rolling first if the size limit has been reached.
   * @param {string|Buffer} chunk - data to append
   * @param {string} encoding - unused; kept for stream-like signature
   */
  // eslint-disable-next-line no-unused-vars
  write(chunk, encoding) {
    const that = this;
    function writeTheChunk() {
      debug('writing the chunk to the file');
      that.currentSize += chunk.length;
      fs.appendFileSync(that.filename, chunk);
    }
    debug('in write');
    if (this.shouldRoll()) {
      this.currentSize = 0;
      this.roll(this.filename);
    }
    writeTheChunk();
  }
}
/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file the file log messages will be written to
 * @param layout a function that takes a logevent and returns a string
 * (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file,
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 */
function fileAppender(
  file,
  layout,
  logSize,
  numBackups,
  options,
  timezoneOffset
) {
  // Reject obviously bad paths before touching the filesystem.
  if (typeof file !== 'string' || file.length === 0) {
    throw new Error(`Invalid filename: ${file}`);
  } else if (file.endsWith(path.sep)) {
    throw new Error(`Filename is a directory: ${file}`);
  } else {
    // handle ~ expansion: https://github.com/nodejs/node/issues/684
    // exclude ~ and ~filename as these can be valid files
    file = file.replace(new RegExp(`^~(?=${path.sep}.+)`), os.homedir());
  }
  file = path.normalize(file);
  // Default to 5 backups; 0 is a valid, explicit "no backups" setting.
  numBackups = !numBackups && numBackups !== 0 ? 5 : numBackups;
  debug(
    'Creating fileSync appender (',
    file,
    ', ',
    logSize,
    ', ',
    numBackups,
    ', ',
    options,
    ', ',
    timezoneOffset,
    ')'
  );
  // Returns either a size-rolling writer or a plain append-only writer,
  // depending on whether a maximum file size was configured.
  function openTheStream(filePath, fileSize, numFiles) {
    let stream;
    if (fileSize) {
      stream = new RollingFileSync(filePath, fileSize, numFiles, options);
    } else {
      stream = ((f) => {
        // touch the file to apply flags (like w to truncate the file)
        touchFile(f, options);
        return {
          write(data) {
            fs.appendFileSync(f, data);
          },
        };
      })(filePath);
    }
    return stream;
  }
  const logFile = openTheStream(file, logSize, numBackups);
  // The appender itself: format the event and append it with a newline.
  return (loggingEvent) => {
    logFile.write(layout(loggingEvent, timezoneOffset) + eol);
  };
}
/**
 * Build a fileSync appender from a log4js configuration object.
 *
 * @param config appender configuration (filename, layout, maxLogSize,
 * backups, flags, encoding, mode, timezoneOffset)
 * @param layouts the layouts module, used to resolve the configured layout
 * @returns the appender function
 */
function configure(config, layouts) {
  // Resolve the configured layout, falling back to the basic layout.
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  // File-open options with the documented defaults.
  const streamOptions = {
    flags: config.flags || 'a',
    encoding: config.encoding || 'utf8',
    mode: config.mode || 0o600,
  };
  return fileAppender(
    config.filename,
    layout,
    config.maxLogSize,
    config.backups,
    streamOptions,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');
const os = require('os');
const eol = os.EOL;
// Ensure `file` exists and is writable: create its parent directories (with
// a fallback for Node < 10.12.0, which lacks mkdirSync's recursive option)
// and append an empty string so permission/flag errors surface early.
function touchFile(file, options) {
  // attempt to create the directory
  const mkdir = (dir) => {
    try {
      return fs.mkdirSync(dir, { recursive: true });
    } catch (e) {
      // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
      // recursive creation of parent first
      if (e.code === 'ENOENT') {
        mkdir(path.dirname(dir));
        return mkdir(dir);
      }
      // throw error for all except EEXIST and EROFS (read-only filesystem)
      if (e.code !== 'EEXIST' && e.code !== 'EROFS') {
        throw e;
      }
      // EEXIST: throw if file and not directory
      // EROFS : throw if directory not found
      else {
        try {
          if (fs.statSync(dir).isDirectory()) {
            return dir;
          }
          throw e;
        } catch (err) {
          // stat failed or `dir` is a plain file: surface the original error
          throw e;
        }
      }
    }
  };
  mkdir(path.dirname(file));
  // try to throw EISDIR, EROFS, EACCES
  fs.appendFileSync(file, '', { mode: options.mode, flag: options.flags });
}
class RollingFileSync {
  /**
   * Synchronous rolling file writer: appends to `filename` and, once the
   * file reaches `maxLogSize` bytes, rotates it through numbered backups
   * (file.1, file.2, ... up to `backups`).
   *
   * @param {string} filename - path of the active log file
   * @param {number} maxLogSize - size in bytes that triggers a roll
   * @param {number} backups - number of rolled files to keep (0 = truncate only)
   * @param {object} options - fs options (mode, flags) used when touching the file
   * @throws {Error} if maxLogSize is negative
   */
  constructor(filename, maxLogSize, backups, options) {
    debug('In RollingFileStream');
    if (maxLogSize < 0) {
      throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
    }
    this.filename = filename;
    this.size = maxLogSize;
    this.backups = backups;
    this.options = options;
    this.currentSize = 0;
    // Determine the starting size; create the file if it does not exist yet.
    function currentFileSize(file) {
      let fileSize = 0;
      try {
        fileSize = fs.statSync(file).size;
      } catch (e) {
        // file does not exist
        touchFile(file, options);
      }
      return fileSize;
    }
    this.currentSize = currentFileSize(this.filename);
  }
  // True once the accumulated byte count has reached the configured maximum.
  shouldRoll() {
    debug(
      'should roll with current size %d, and max size %d',
      this.currentSize,
      this.size
    );
    return this.currentSize >= this.size;
  }
  // Rotate backups: rename file.n -> file.n+1 (highest index first, dropping
  // anything past `backups`), freeing the base file for new writes.
  roll(filename) {
    const that = this;
    const nameMatcher = new RegExp(`^${path.basename(filename)}`);
    function justTheseFiles(item) {
      return nameMatcher.test(item);
    }
    // Numeric suffix of a rolled file ("file.3" -> 3); the base file is 0.
    function index(filename_) {
      return (
        parseInt(filename_.slice(`${path.basename(filename)}.`.length), 10) || 0
      );
    }
    function byIndex(a, b) {
      return index(a) - index(b);
    }
    function increaseFileIndex(fileToRename) {
      const idx = index(fileToRename);
      debug(`Index of ${fileToRename} is ${idx}`);
      if (that.backups === 0) {
        // no backups wanted: just empty the base file
        fs.truncateSync(filename, 0);
      } else if (idx < that.backups) {
        // on windows, you can get a EEXIST error if you rename a file to an existing file
        // so, we'll try to delete the file we're renaming to first
        try {
          // FIX: target the actual filename; previously this used the
          // garbled literal "$(unknown)" instead of the log file path.
          fs.unlinkSync(`${filename}.${idx + 1}`);
        } catch (e) {
          // ignore err: if we could not delete, it's most likely that it doesn't exist
        }
        debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`);
        fs.renameSync(
          path.join(path.dirname(filename), fileToRename),
          `${filename}.${idx + 1}`
        );
      }
    }
    function renameTheFiles() {
      // roll the backups (rename file.n to file.n+1, where n <= numBackups)
      debug('Renaming the old files');
      const files = fs.readdirSync(path.dirname(filename));
      files
        .filter(justTheseFiles)
        .sort(byIndex)
        .reverse()
        .forEach(increaseFileIndex);
    }
    debug('Rolling, rolling, rolling');
    renameTheFiles();
  }
  /**
   * Append a chunk, rolling first if the size limit has been reached.
   * @param {string|Buffer} chunk - data to append
   * @param {string} encoding - unused; kept for stream-like signature
   */
  // eslint-disable-next-line no-unused-vars
  write(chunk, encoding) {
    const that = this;
    function writeTheChunk() {
      debug('writing the chunk to the file');
      that.currentSize += chunk.length;
      fs.appendFileSync(that.filename, chunk);
    }
    debug('in write');
    if (this.shouldRoll()) {
      this.currentSize = 0;
      this.roll(this.filename);
    }
    writeTheChunk();
  }
}
/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file the file log messages will be written to
 * @param layout a function that takes a logevent and returns a string
 * (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file,
 * if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize
 * has been reached (default 5)
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 */
function fileAppender(
  file,
  layout,
  logSize,
  numBackups,
  options,
  timezoneOffset
) {
  // Reject obviously bad paths before touching the filesystem.
  if (typeof file !== 'string' || file.length === 0) {
    throw new Error(`Invalid filename: ${file}`);
  } else if (file.endsWith(path.sep)) {
    throw new Error(`Filename is a directory: ${file}`);
  } else {
    // handle ~ expansion: https://github.com/nodejs/node/issues/684
    // exclude ~ and ~filename as these can be valid files
    file = file.replace(new RegExp(`^~(?=${path.sep}.+)`), os.homedir());
  }
  file = path.normalize(file);
  // Default to 5 backups; 0 is a valid, explicit "no backups" setting.
  numBackups = !numBackups && numBackups !== 0 ? 5 : numBackups;
  debug(
    'Creating fileSync appender (',
    file,
    ', ',
    logSize,
    ', ',
    numBackups,
    ', ',
    options,
    ', ',
    timezoneOffset,
    ')'
  );
  // Returns either a size-rolling writer or a plain append-only writer,
  // depending on whether a maximum file size was configured.
  function openTheStream(filePath, fileSize, numFiles) {
    let stream;
    if (fileSize) {
      stream = new RollingFileSync(filePath, fileSize, numFiles, options);
    } else {
      stream = ((f) => {
        // touch the file to apply flags (like w to truncate the file)
        touchFile(f, options);
        return {
          write(data) {
            fs.appendFileSync(f, data);
          },
        };
      })(filePath);
    }
    return stream;
  }
  const logFile = openTheStream(file, logSize, numBackups);
  // The appender itself: format the event and append it with a newline.
  return (loggingEvent) => {
    logFile.write(layout(loggingEvent, timezoneOffset) + eol);
  };
}
/**
 * Builds a fileSync appender from a configuration object.
 * Uses the configured layout when present, falling back to basicLayout.
 * @param {Object} config appender configuration (filename, maxLogSize, ...)
 * @param {Object} layouts layout registry supplied by log4js
 * @returns {Function} the appender function
 */
function configure(config, layouts) {
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  // Stream options with conventional defaults: append mode, utf8, 0600.
  const options = {
    flags: config.flags || 'a',
    encoding: config.encoding || 'utf8',
    mode: config.mode || 0o600,
  };
  return fileAppender(
    config.filename,
    layout,
    config.maxLogSize,
    config.backups,
    options,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/layouts.md | # Layouts
Layouts are functions used by appenders to format log events for output. They take a log event as an argument and return a string. Log4js comes with several appenders built-in, and provides ways to create your own if these are not suitable.
For most use cases you will not need to configure layouts - there are some appenders which do not need layouts defined (for example, [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP)); all the appenders that use layouts will have a sensible default defined.
## Configuration
Most appender configuration will take a field called `layout`, which is an object - typically with a single field `type` which is the name of a layout defined below. Some layouts require extra configuration options, which should be included in the same object.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "basic" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
```
This configuration replaces the [stdout](stdout.md) appender's default `coloured` layout with `basic` layout.
# Built-in Layouts
## Basic
- `type` - `basic`
Basic layout will output the timestamp, level, category, followed by the formatted log event data.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "basic" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
logger.error("Cheese is too ripe!");
```
This will output:
```
[2017-03-30 07:57:00.113] [ERROR] cheese - Cheese is too ripe!
```
## Coloured
- `type` - `coloured` (or `colored`)
This layout is the same as `basic`, except that the timestamp, level and category will be coloured according to the log event's level (if your terminal/file supports it - if you see some weird characters in your output and no colour then you should probably switch to `basic`). The colours used are:
- `TRACE` - 'blue'
- `DEBUG` - 'cyan'
- `INFO` - 'green'
- `WARN` - 'yellow'
- `ERROR` - 'red'
- `FATAL` - 'magenta'
## Message Pass-Through
- `type` - `messagePassThrough`
This layout just formats the log event data, and does not output a timestamp, level or category. It is typically used in appenders that serialise the events using a specific format (e.g. [gelf](https://github.com/log4js-node/gelf)).
## Example
```javascript
log4js.configure({
appenders: {
out: { type: "stdout", layout: { type: "messagePassThrough" } },
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
const cheeseName = "gouda";
logger.error("Cheese is too ripe! Cheese was: ", cheeseName);
```
This will output:
```
Cheese is too ripe! Cheese was: gouda
```
## Dummy
- `type` - `dummy`
This layout only outputs the first value in the log event's data. It was added for the [logstashUDP](https://github.com/log4js-node/logstashUDP) appender, and I'm not sure there's much use for it outside that.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "dummy" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
const cheeseName = "gouda";
logger.error("Cheese is too ripe! Cheese was: ", cheeseName);
```
This will output:
```
Cheese is too ripe! Cheese was:
```
## Pattern
- `type` - `pattern`
- `pattern` - `string` - specifier for the output format, using placeholders as described below
- `tokens` - `object` (optional) - user-defined tokens to be used in the pattern
## Pattern format
The pattern string can contain any characters, but sequences beginning with `%` will be replaced with values taken from the log event, and other environmental values.
Format for specifiers is `%[padding].[truncation][field]{[format]}` - padding and truncation are optional, and format only applies to a few tokens (notably, date). Both padding and truncation values can be negative.
- Positive truncation - truncate the string starting from the beginning
- Negative truncation - truncate the string starting from the end of the string
- Positive padding - left pad the string to make it this length, if the string is longer than the padding value then nothing happens
- Negative padding - right pad the string to make it this length, if the string is longer than the padding value then nothing happens
To make fixed-width columns in your log output, set padding and truncation to the same size (they don't have to have the same sign though, you could have right truncated, left padded columns that are always 10 characters wide with a pattern like "%10.-10m").
e.g. %5.10p - left pad the log level by up to 5 characters, keep the whole string to a max length of 10.
So, for a log level of INFO the output would be " INFO", for DEBUG it would be "DEBUG" and for a (custom) log level of CATASTROPHIC it would be "CATASTROPH".
Fields can be any of:
- `%r` time in toLocaleTimeString format
- `%p` log level
- `%c` log category
- `%h` hostname
- `%m` log data
- `%d` date, formatted - default is `ISO8601`, format options are: `ISO8601`, `ISO8601_WITH_TZ_OFFSET`, `ABSOLUTETIME`, `DATETIME`, or any string compatible with the [date-format](https://www.npmjs.com/package/date-format) library. e.g. `%d{DATETIME}`, `%d{yyyy/MM/dd-hh.mm.ss}`
- `%%` % - for when you want a literal `%` in your output
- `%n` newline
- `%z` process id (from `process.pid`)
- `%f` full path of filename (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%f{depth}` the path's depth lets you choose to have only the filename (`%f{1}`) or a chosen number of directories
- `%l` line number (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%o` column position (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%s` call stack (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%M` function or method name (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%C` class name (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%A` function or method name alias (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%F` fully qualified caller name (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%x{<tokenname>}` add dynamic tokens to your log. Tokens are specified in the tokens parameter.
- `%X{<tokenname>}` add values from the Logger context. Tokens are keys into the context values.
- `%[` start a coloured block (colour will be taken from the log level, similar to `colouredLayout`)
- `%]` end a coloured block
## Tokens
User-defined tokens can be either a string or a function. Functions will be passed the log event, and should return a string. For example, you could define a custom token that outputs the log event's context value for 'user' like so:
```javascript
log4js.configure({
appenders: {
out: {
type: "stdout",
layout: {
type: "pattern",
pattern: "%d %p %c %x{user} %m%n",
tokens: {
user: function(logEvent) {
return AuthLibrary.currentUser();
},
},
},
},
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger();
logger.info("doing something.");
```
This would output:
```
2017-06-01 08:32:56.283 INFO default charlie doing something.
```
You can also use the Logger context to store tokens (sometimes called Nested Diagnostic Context, or Mapped Diagnostic Context) and use them in your layouts.
```javascript
log4js.configure({
appenders: {
out: {
type: "stdout",
layout: {
type: "pattern",
pattern: "%d %p %c %X{user} %m%n",
},
},
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger();
logger.addContext("user", "charlie");
logger.info("doing something.");
```
This would output:
```
2017-06-01 08:32:56.283 INFO default charlie doing something.
```
Note that you can also add functions to the Logger Context, and they will be passed the logEvent as well.
# Adding your own layouts
You can add your own layouts by calling `log4js.addLayout(type, fn)` before calling `log4js.configure`. `type` is the label you want to use to refer to your layout in appender configuration. `fn` is a function that takes a single object argument, which will contain the configuration for the layout instance, and returns a layout function. A layout function takes a log event argument and returns a string (usually, although you could return anything as long as the appender knows what to do with it).
## Custom Layout Example
This example can also be found in examples/custom-layout.js.
```javascript
const log4js = require("log4js");
log4js.addLayout("json", function(config) {
return function(logEvent) {
return JSON.stringify(logEvent) + config.separator;
};
});
log4js.configure({
appenders: {
out: { type: "stdout", layout: { type: "json", separator: "," } },
},
categories: {
default: { appenders: ["out"], level: "info" },
},
});
const logger = log4js.getLogger("json-test");
logger.info("this is just a test");
logger.error("of a custom appender");
logger.warn("that outputs json");
log4js.shutdown(() => {});
```
This example outputs the following:
```javascript
{"startTime":"2017-06-05T22:23:08.479Z","categoryName":"json-test","data":["this is just a test"],"level":{"level":20000,"levelStr":"INFO"},"context":{}},
{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["of a custom appender"],"level":{"level":40000,"levelStr":"ERROR"},"context":{}},
{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["that outputs json"],"level":{"level":30000,"levelStr":"WARN"},"context":{}},
```
| # Layouts
Layouts are functions used by appenders to format log events for output. They take a log event as an argument and return a string. Log4js comes with several appenders built-in, and provides ways to create your own if these are not suitable.
For most use cases you will not need to configure layouts - there are some appenders which do not need layouts defined (for example, [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP)); all the appenders that use layouts will have a sensible default defined.
## Configuration
Most appender configuration will take a field called `layout`, which is an object - typically with a single field `type` which is the name of a layout defined below. Some layouts require extra configuration options, which should be included in the same object.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "basic" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
```
This configuration replaces the [stdout](stdout.md) appender's default `coloured` layout with `basic` layout.
# Built-in Layouts
## Basic
- `type` - `basic`
Basic layout will output the timestamp, level, category, followed by the formatted log event data.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "basic" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
logger.error("Cheese is too ripe!");
```
This will output:
```
[2017-03-30 07:57:00.113] [ERROR] cheese - Cheese is too ripe!
```
## Coloured
- `type` - `coloured` (or `colored`)
This layout is the same as `basic`, except that the timestamp, level and category will be coloured according to the log event's level (if your terminal/file supports it - if you see some weird characters in your output and no colour then you should probably switch to `basic`). The colours used are:
- `TRACE` - 'blue'
- `DEBUG` - 'cyan'
- `INFO` - 'green'
- `WARN` - 'yellow'
- `ERROR` - 'red'
- `FATAL` - 'magenta'
## Message Pass-Through
- `type` - `messagePassThrough`
This layout just formats the log event data, and does not output a timestamp, level or category. It is typically used in appenders that serialise the events using a specific format (e.g. [gelf](https://github.com/log4js-node/gelf)).
## Example
```javascript
log4js.configure({
appenders: {
out: { type: "stdout", layout: { type: "messagePassThrough" } },
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
const cheeseName = "gouda";
logger.error("Cheese is too ripe! Cheese was: ", cheeseName);
```
This will output:
```
Cheese is too ripe! Cheese was: gouda
```
## Dummy
- `type` - `dummy`
This layout only outputs the first value in the log event's data. It was added for the [logstashUDP](https://github.com/log4js-node/logstashUDP) appender, and I'm not sure there's much use for it outside that.
## Example
```javascript
log4js.configure({
appenders: { out: { type: "stdout", layout: { type: "dummy" } } },
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger("cheese");
const cheeseName = "gouda";
logger.error("Cheese is too ripe! Cheese was: ", cheeseName);
```
This will output:
```
Cheese is too ripe! Cheese was:
```
## Pattern
- `type` - `pattern`
- `pattern` - `string` - specifier for the output format, using placeholders as described below
- `tokens` - `object` (optional) - user-defined tokens to be used in the pattern
## Pattern format
The pattern string can contain any characters, but sequences beginning with `%` will be replaced with values taken from the log event, and other environmental values.
Format for specifiers is `%[padding].[truncation][field]{[format]}` - padding and truncation are optional, and format only applies to a few tokens (notably, date). Both padding and truncation values can be negative.
- Positive truncation - truncate the string starting from the beginning
- Negative truncation - truncate the string starting from the end of the string
- Positive padding - left pad the string to make it this length, if the string is longer than the padding value then nothing happens
- Negative padding - right pad the string to make it this length, if the string is longer than the padding value then nothing happens
To make fixed-width columns in your log output, set padding and truncation to the same size (they don't have to have the same sign though, you could have right truncated, left padded columns that are always 10 characters wide with a pattern like "%10.-10m").
e.g. %5.10p - left pad the log level by up to 5 characters, keep the whole string to a max length of 10.
So, for a log level of INFO the output would be " INFO", for DEBUG it would be "DEBUG" and for a (custom) log level of CATASTROPHIC it would be "CATASTROPH".
Fields can be any of:
- `%r` time in toLocaleTimeString format
- `%p` log level
- `%c` log category
- `%h` hostname
- `%m` log data
- `%d` date, formatted - default is `ISO8601`, format options are: `ISO8601`, `ISO8601_WITH_TZ_OFFSET`, `ABSOLUTETIME`, `DATETIME`, or any string compatible with the [date-format](https://www.npmjs.com/package/date-format) library. e.g. `%d{DATETIME}`, `%d{yyyy/MM/dd-hh.mm.ss}`
- `%%` % - for when you want a literal `%` in your output
- `%n` newline
- `%z` process id (from `process.pid`)
- `%f` full path of filename (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%f{depth}` the path's depth lets you choose to have only the filename (`%f{1}`) or a chosen number of directories
- `%l` line number (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%o` column position (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%s` call stack (requires `enableCallStack: true` on the category, see [configuration object](api.md))
- `%C` class name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316))
- `%M` method or function name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316))
- `%A` method or function alias (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316))
- `%F` fully qualified caller name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316))
- `%x{<tokenname>}` add dynamic tokens to your log. Tokens are specified in the tokens parameter.
- `%X{<tokenname>}` add values from the Logger context. Tokens are keys into the context values.
- `%[` start a coloured block (colour will be taken from the log level, similar to `colouredLayout`)
- `%]` end a coloured block
## Tokens
User-defined tokens can be either a string or a function. Functions will be passed the log event, and should return a string. For example, you could define a custom token that outputs the log event's context value for 'user' like so:
```javascript
log4js.configure({
appenders: {
out: {
type: "stdout",
layout: {
type: "pattern",
pattern: "%d %p %c %x{user} %m%n",
tokens: {
user: function(logEvent) {
return AuthLibrary.currentUser();
},
},
},
},
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger();
logger.info("doing something.");
```
This would output:
```
2017-06-01 08:32:56.283 INFO default charlie doing something.
```
You can also use the Logger context to store tokens (sometimes called Nested Diagnostic Context, or Mapped Diagnostic Context) and use them in your layouts.
```javascript
log4js.configure({
appenders: {
out: {
type: "stdout",
layout: {
type: "pattern",
pattern: "%d %p %c %X{user} %m%n",
},
},
},
categories: { default: { appenders: ["out"], level: "info" } },
});
const logger = log4js.getLogger();
logger.addContext("user", "charlie");
logger.info("doing something.");
```
This would output:
```
2017-06-01 08:32:56.283 INFO default charlie doing something.
```
Note that you can also add functions to the Logger Context, and they will be passed the logEvent as well.
# Adding your own layouts
You can add your own layouts by calling `log4js.addLayout(type, fn)` before calling `log4js.configure`. `type` is the label you want to use to refer to your layout in appender configuration. `fn` is a function that takes a single object argument, which will contain the configuration for the layout instance, and returns a layout function. A layout function takes a log event argument and returns a string (usually, although you could return anything as long as the appender knows what to do with it).
## Custom Layout Example
This example can also be found in examples/custom-layout.js.
```javascript
const log4js = require("log4js");
log4js.addLayout("json", function(config) {
return function(logEvent) {
return JSON.stringify(logEvent) + config.separator;
};
});
log4js.configure({
appenders: {
out: { type: "stdout", layout: { type: "json", separator: "," } },
},
categories: {
default: { appenders: ["out"], level: "info" },
},
});
const logger = log4js.getLogger("json-test");
logger.info("this is just a test");
logger.error("of a custom appender");
logger.warn("that outputs json");
log4js.shutdown(() => {});
```
This example outputs the following:
```javascript
{"startTime":"2017-06-05T22:23:08.479Z","categoryName":"json-test","data":["this is just a test"],"level":{"level":20000,"levelStr":"INFO"},"context":{}},
{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["of a custom appender"],"level":{"level":40000,"levelStr":"ERROR"},"context":{}},
{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["that outputs json"],"level":{"level":30000,"levelStr":"WARN"},"context":{}},
```
| 1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/LoggingEvent.js | const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
  /**
   * Models a logging event.
   * @constructor
   * @param {string} categoryName name of category
   * @param {Log4js.Level} level level of message
   * @param {Array} data objects to log
   * @param {Object} [context] logger context key/value pairs; copied, so later
   *   changes to the logger's context do not affect this event
   * @param {Object} [location] optional call-site details captured by the logger
   * @author Seth Chisamore
   */
  constructor(categoryName, level, data, context, location) {
    this.startTime = new Date();
    this.categoryName = categoryName;
    this.data = data;
    this.level = level;
    this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
    this.pid = process.pid;
    // Call-site details are only attached when the logger captured them.
    if (location) {
      this.functionName = location.functionName;
      this.fileName = location.fileName;
      this.lineNumber = location.lineNumber;
      this.columnNumber = location.columnNumber;
      this.callStack = location.callStack;
      this.className = location.className;
      this.functionAlias = location.functionAlias;
      this.callerName = location.callerName;
    }
  }
  /**
   * Serialises this event to a flatted-JSON string so it can be sent to
   * another process and rebuilt with LoggingEvent.deserialise().
   * @returns {string} serialised representation of this event
   */
  serialise() {
    return flatted.stringify(this, (key, value) => {
      // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
      // The following allows us to serialize errors correctly.
      // duck-typing for Error object
      if (value && value.message && value.stack) {
        // eslint-disable-next-line prefer-object-spread
        value = Object.assign(
          { message: value.message, stack: value.stack },
          value
        );
      }
      // JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
      // The following allows us to serialize to NaN, Infinity and -Infinity correctly.
      else if (
        typeof value === 'number' &&
        (Number.isNaN(value) || !Number.isFinite(value))
      ) {
        value = value.toString();
      }
      // JSON.stringify([undefined]) returns [null].
      // The following allows us to serialize to undefined correctly.
      else if (typeof value === 'undefined') {
        value = typeof value;
      }
      return value;
    });
  }
  /**
   * Rebuilds a LoggingEvent from the output of serialise().
   * Error-shaped objects are revived as real Error instances (extra fields
   * preserved).
   * NOTE(review): this reviver does not undo the serialise() substitutions for
   * non-finite numbers and undefined — those arrive as the strings "NaN",
   * "Infinity", "-Infinity" and "undefined" rather than the original values.
   * If parsing fails, an ERROR-level event describing the failure is returned
   * instead of throwing.
   * @param {string} serialised string produced by serialise()
   * @returns {LoggingEvent}
   */
  static deserialise(serialised) {
    let event;
    try {
      const rehydratedEvent = flatted.parse(serialised, (key, value) => {
        // Revive anything error-shaped as a real Error instance.
        if (value && value.message && value.stack) {
          const fakeError = new Error(value);
          Object.keys(value).forEach((k) => {
            fakeError[k] = value[k];
          });
          value = fakeError;
        }
        return value;
      });
      // Reassemble the flattened call-site fields into the location object
      // the constructor expects.
      rehydratedEvent.location = {
        functionName: rehydratedEvent.functionName,
        fileName: rehydratedEvent.fileName,
        lineNumber: rehydratedEvent.lineNumber,
        columnNumber: rehydratedEvent.columnNumber,
        callStack: rehydratedEvent.callStack,
        className: rehydratedEvent.className,
        functionAlias: rehydratedEvent.functionAlias,
        callerName: rehydratedEvent.callerName,
      };
      event = new LoggingEvent(
        rehydratedEvent.categoryName,
        levels.getLevel(rehydratedEvent.level.levelStr),
        rehydratedEvent.data,
        rehydratedEvent.context,
        rehydratedEvent.location
      );
      // Restore fields the constructor would otherwise have overwritten.
      event.startTime = new Date(rehydratedEvent.startTime);
      event.pid = rehydratedEvent.pid;
      event.cluster = rehydratedEvent.cluster;
    } catch (e) {
      // Never throw from deserialise: surface the parse failure as an event.
      event = new LoggingEvent('log4js', levels.ERROR, [
        'Unable to parse log:',
        serialised,
        'because: ',
        e,
      ]);
    }
    return event;
  }
}
module.exports = LoggingEvent;
| const flatted = require('flatted');
const levels = require('./levels');
/**
* @name LoggingEvent
* @namespace Log4js
*/
class LoggingEvent {
  /**
   * Models a logging event.
   * @constructor
   * @param {string} categoryName name of category
   * @param {Log4js.Level} level level of message
   * @param {Array} data objects to log
   * @param {Object} [context] logger context key/value pairs; copied, so later
   *   changes to the logger's context do not affect this event
   * @param {Object} [location] optional call-site details captured by the logger
   * @author Seth Chisamore
   */
  constructor(categoryName, level, data, context, location) {
    this.startTime = new Date();
    this.categoryName = categoryName;
    this.data = data;
    this.level = level;
    this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread
    this.pid = process.pid;
    // Call-site details are only attached when the logger captured them.
    if (location) {
      this.fileName = location.fileName;
      this.lineNumber = location.lineNumber;
      this.columnNumber = location.columnNumber;
      this.callStack = location.callStack;
      this.className = location.className;
      this.functionName = location.functionName;
      this.functionAlias = location.functionAlias;
      this.callerName = location.callerName;
    }
  }
  /**
   * Serialises this event to a flatted-JSON string so it can be sent to
   * another process and rebuilt with LoggingEvent.deserialise().
   * @returns {string} serialised representation of this event
   */
  serialise() {
    return flatted.stringify(this, (key, value) => {
      // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
      // The following allows us to serialize errors correctly.
      // duck-typing for Error object
      if (value && value.message && value.stack) {
        // eslint-disable-next-line prefer-object-spread
        value = Object.assign(
          { message: value.message, stack: value.stack },
          value
        );
      }
      // JSON.stringify({a: parseInt('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}.
      // The following allows us to serialize to NaN, Infinity and -Infinity correctly.
      else if (
        typeof value === 'number' &&
        (Number.isNaN(value) || !Number.isFinite(value))
      ) {
        value = value.toString();
      }
      // JSON.stringify([undefined]) returns [null].
      // The following allows us to serialize to undefined correctly.
      else if (typeof value === 'undefined') {
        value = typeof value;
      }
      return value;
    });
  }
  /**
   * Rebuilds a LoggingEvent from the output of serialise().
   * Error-shaped objects are revived as real Error instances (extra fields
   * preserved).
   * NOTE(review): this reviver does not undo the serialise() substitutions for
   * non-finite numbers and undefined — those arrive as the strings "NaN",
   * "Infinity", "-Infinity" and "undefined" rather than the original values.
   * If parsing fails, an ERROR-level event describing the failure is returned
   * instead of throwing.
   * @param {string} serialised string produced by serialise()
   * @returns {LoggingEvent}
   */
  static deserialise(serialised) {
    let event;
    try {
      const rehydratedEvent = flatted.parse(serialised, (key, value) => {
        // Revive anything error-shaped as a real Error instance.
        if (value && value.message && value.stack) {
          const fakeError = new Error(value);
          Object.keys(value).forEach((k) => {
            fakeError[k] = value[k];
          });
          value = fakeError;
        }
        return value;
      });
      // Reassemble the flattened call-site fields into the location object
      // the constructor expects.
      rehydratedEvent.location = {
        fileName: rehydratedEvent.fileName,
        lineNumber: rehydratedEvent.lineNumber,
        columnNumber: rehydratedEvent.columnNumber,
        callStack: rehydratedEvent.callStack,
        className: rehydratedEvent.className,
        functionName: rehydratedEvent.functionName,
        functionAlias: rehydratedEvent.functionAlias,
        callerName: rehydratedEvent.callerName,
      };
      event = new LoggingEvent(
        rehydratedEvent.categoryName,
        levels.getLevel(rehydratedEvent.level.levelStr),
        rehydratedEvent.data,
        rehydratedEvent.context,
        rehydratedEvent.location
      );
      // Restore fields the constructor would otherwise have overwritten.
      event.startTime = new Date(rehydratedEvent.startTime);
      event.pid = rehydratedEvent.pid;
      event.cluster = rehydratedEvent.cluster;
    } catch (e) {
      // Never throw from deserialise: surface the parse failure as an event.
      event = new LoggingEvent('log4js', levels.ERROR, [
        'Unable to parse log:',
        serialised,
        'because: ',
        e,
      ]);
    }
    return event;
  }
}
module.exports = LoggingEvent;
| 1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/layouts.js | const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
// ANSI SGR codes as [open, close] pairs: colorizeStart() emits the first
// value and colorizeEnd() the second, each wrapped in an `\x1B[<code>m`
// escape sequence.
const styles = {
  // styles
  bold: [1, 22],
  italic: [3, 23],
  underline: [4, 24],
  inverse: [7, 27],
  // grayscale
  white: [37, 39],
  grey: [90, 39],
  black: [90, 39],
  // colors
  blue: [34, 39],
  cyan: [36, 39],
  green: [32, 39],
  magenta: [35, 39],
  red: [91, 39],
  yellow: [33, 39],
};
// Opening ANSI escape sequence for the given style name; '' when no style.
function colorizeStart(style) {
  if (!style) {
    return '';
  }
  return `\x1B[${styles[style][0]}m`;
}
// Closing ANSI escape sequence for the given style name; '' when no style.
function colorizeEnd(style) {
  if (!style) {
    return '';
  }
  return `\x1B[${styles[style][1]}m`;
}
/**
 * Taken from masylum's fork (https://github.com/masylum/log4js-node)
 * Wraps str in the opening and closing escape codes for the given style.
 */
function colorize(str, style) {
  const open = colorizeStart(style);
  const close = colorizeEnd(style);
  return open + str + close;
}
// Builds the "[timestamp] [LEVEL] category - " prefix for an event,
// coloured when a colour is supplied.
function timestampLevelAndCategory(loggingEvent, colour) {
  const prefix = util.format(
    '[%s] [%s] %s - ',
    dateFormat.asString(loggingEvent.startTime),
    loggingEvent.level.toString(),
    loggingEvent.categoryName
  );
  return colorize(prefix, colour);
}
/**
 * BasicLayout is a simple layout for storing the logs. The logs are stored
 * in following format:
 * <pre>
 * [startTime] [logLevel] categoryName - message\n
 * </pre>
 *
 * @author Stephan Strittmatter
 */
function basicLayout(loggingEvent) {
  const prefix = timestampLevelAndCategory(loggingEvent);
  const message = util.format(...loggingEvent.data);
  return prefix + message;
}
/**
 * colouredLayout - taken from masylum's fork.
 * same as basicLayout, but with colours.
 */
function colouredLayout(loggingEvent) {
  const prefix = timestampLevelAndCategory(
    loggingEvent,
    loggingEvent.level.colour
  );
  const message = util.format(...loggingEvent.data);
  return prefix + message;
}
// Formats just the event data with util.format — no timestamp, level or
// category prefix.
function messagePassThroughLayout(loggingEvent) {
  const { data } = loggingEvent;
  return util.format(...data);
}
// Returns only the first item of the event's data, unformatted.
function dummyLayout(loggingEvent) {
  const [first] = loggingEvent.data;
  return first;
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
 * - %d date in various formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
 * - %o column position
* - %s call stack
* - %M method or function name
* - %C class name
 * - %A method or function alias
* - %F fully qualified caller name
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflosMCAF%])(\{([^}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
// The event's category, optionally truncated to its last `specifier`
// dot-separated segments.
function categoryName(loggingEvent, specifier) {
  let loggerName = loggingEvent.categoryName;
  if (!specifier) {
    return loggerName;
  }
  const precision = parseInt(specifier, 10);
  const parts = loggerName.split('.');
  if (precision < parts.length) {
    loggerName = parts.slice(parts.length - precision).join('.');
  }
  return loggerName;
}
// Formats the event's startTime. The specifier may be one of the named
// formats handled below or any pattern the date-format library understands;
// without a specifier the ISO8601 format is used.
function formatAsDate(loggingEvent, specifier) {
  let format = dateFormat.ISO8601_FORMAT;
  if (specifier) {
    format = specifier;
    // Pick up special cases
    switch (format) {
      case 'ISO8601':
      case 'ISO8601_FORMAT':
        format = dateFormat.ISO8601_FORMAT;
        break;
      case 'ISO8601_WITH_TZ_OFFSET':
      case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
        format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
        break;
      case 'ABSOLUTE':
        // Deprecated alias: emit a warning, then deliberately fall through
        // to the ABSOLUTETIME handling below.
        process.emitWarning(
          'Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. ' +
            'Please use %d{ABSOLUTETIME} instead.',
          'DeprecationWarning',
          'log4js-node-DEP0003'
        );
        debug(
          '[log4js-node-DEP0003]',
          'DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.'
        );
        // falls through
      case 'ABSOLUTETIME':
      case 'ABSOLUTETIME_FORMAT':
        format = dateFormat.ABSOLUTETIME_FORMAT;
        break;
      case 'DATE':
        // Deprecated alias: emit a warning, then deliberately fall through
        // to the DATETIME handling below.
        process.emitWarning(
          'Pattern %d{DATE} is deprecated due to the confusion it causes when used. ' +
            'Please use %d{DATETIME} instead.',
          'DeprecationWarning',
          'log4js-node-DEP0004'
        );
        debug(
          '[log4js-node-DEP0004]',
          'DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.'
        );
        // falls through
      case 'DATETIME':
      case 'DATETIME_FORMAT':
        format = dateFormat.DATETIME_FORMAT;
        break;
      // no default
    }
  }
  // Format the date
  return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return util.format(...loggingEvent.data);
}
function endOfLine() {
return os.EOL;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
}
function startColour(loggingEvent) {
return colorizeStart(loggingEvent.level.colour);
}
function endColour(loggingEvent) {
return colorizeEnd(loggingEvent.level.colour);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
return loggingEvent && loggingEvent.pid
? loggingEvent.pid.toString()
: process.pid.toString();
}
function clusterInfo() {
// this used to try to return the master and worker pids,
// but it would never have worked because master pid is not available to workers
// leaving this here to maintain compatibility for patterns
return pid();
}
function userDefined(loggingEvent, specifier) {
if (typeof tokens[specifier] !== 'undefined') {
return typeof tokens[specifier] === 'function'
? tokens[specifier](loggingEvent)
: tokens[specifier];
}
return null;
}
function contextDefined(loggingEvent, specifier) {
const resolver = loggingEvent.context[specifier];
if (typeof resolver !== 'undefined') {
return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
}
return null;
}
function fileName(loggingEvent, specifier) {
let filename = loggingEvent.fileName || '';
// support for ESM as it uses url instead of path for file
/* istanbul ignore next: unsure how to simulate ESM for test coverage */
const convertFileURLToPath = function(filepath) {
const urlPrefix = 'file://';
if (filepath.startsWith(urlPrefix)) {
// https://nodejs.org/api/url.html#urlfileurltopathurl
if (typeof url.fileURLToPath === 'function') {
filepath = url.fileURLToPath(filepath);
}
// backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
else {
// posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
// win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
// win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
filepath = path.normalize(
filepath.replace(new RegExp(`^${urlPrefix}`), '')
);
if (process.platform === 'win32') {
if (filepath.startsWith('\\')) {
filepath = filepath.slice(1);
} else {
filepath = path.sep + path.sep + filepath;
}
}
}
}
return filepath;
};
filename = convertFileURLToPath(filename);
if (specifier) {
const fileDepth = parseInt(specifier, 10);
const fileList = filename.split(path.sep);
if (fileList.length > fileDepth) {
filename = fileList.slice(-fileDepth).join(path.sep);
}
}
return filename;
}
function lineNumber(loggingEvent) {
return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
}
function columnNumber(loggingEvent) {
return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
}
function callStack(loggingEvent) {
return loggingEvent.callStack || '';
}
function functionName(loggingEvent) {
return loggingEvent.functionName || '';
}
function className(loggingEvent) {
return loggingEvent.className || '';
}
function functionAlias(loggingEvent) {
return loggingEvent.functionAlias || '';
}
function callerName(loggingEvent) {
return loggingEvent.callerName || '';
}
const replacers = {
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
y: clusterInfo,
z: pid,
'%': percent,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack,
M: functionName,
C: className,
A: functionAlias,
F: callerName,
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
let len;
if (truncation) {
len = parseInt(truncation.slice(1), 10);
// negative truncate length means truncate from end of string
return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
}
return toTruncate;
}
function pad(padding, toPad) {
let len;
if (padding) {
if (padding.charAt(0) === '-') {
len = parseInt(padding.slice(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += ' ';
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = ` ${toPad}`;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
let replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function(loggingEvent) {
let formattedString = '';
let result;
let searchString = pattern;
while ((result = regex.exec(searchString)) !== null) {
// const matchedString = result[0];
const padding = result[1];
const truncation = result[2];
const conversionCharacter = result[3];
const specifier = result[5];
const text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += text.toString();
} else {
// Create a raw replacement string based on the conversion
// character and specifier
const replacement = replaceToken(
conversionCharacter,
loggingEvent,
specifier
);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.slice(result.index + result[0].length);
}
return formattedString;
};
}
// Factories for the built-in layouts, keyed by the layout `type` used in
// configuration; each factory takes the layout config and returns a layout
// function (LoggingEvent -> string).
const layoutMakers = {
  messagePassThrough() {
    return messagePassThroughLayout;
  },
  basic() {
    return basicLayout;
  },
  colored() {
    return colouredLayout;
  },
  coloured() {
    return colouredLayout;
  },
  pattern(config) {
    return patternLayout(config && config.pattern, config && config.tokens);
  },
  dummy() {
    return dummyLayout;
  },
};
module.exports = {
  basicLayout,
  messagePassThroughLayout,
  patternLayout,
  colouredLayout,
  coloredLayout: colouredLayout,
  dummyLayout,
  // Registers a custom layout generator under `name` so configuration can
  // reference it by type.
  addLayout(name, serializerGenerator) {
    layoutMakers[name] = serializerGenerator;
  },
  // Resolves a layout by type name and instantiates it with `config`;
  // returns undefined for unknown type names.
  layout(name, config) {
    return layoutMakers[name] && layoutMakers[name](config);
  },
};
| const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');
const url = require('url');
const debug = require('debug')('log4js:layouts');
// ANSI SGR codes per style: [turn-on code, turn-off code].
const styles = {
  // styles
  bold: [1, 22],
  italic: [3, 23],
  underline: [4, 24],
  inverse: [7, 27],
  // grayscale
  white: [37, 39],
  grey: [90, 39],
  black: [90, 39],
  // colors
  blue: [34, 39],
  cyan: [36, 39],
  green: [32, 39],
  magenta: [35, 39],
  red: [91, 39],
  yellow: [33, 39],
};
// Builds the terminal escape sequence for a single SGR code.
const escapeSequence = (code) => `\x1B[${code}m`;
// Opening escape code for `style`; '' when no style is given.
function colorizeStart(style) {
  if (!style) {
    return '';
  }
  return escapeSequence(styles[style][0]);
}
// Closing (reset) escape code for `style`; '' when no style is given.
function colorizeEnd(style) {
  if (!style) {
    return '';
  }
  return escapeSequence(styles[style][1]);
}
/**
 * Wraps `str` in the opening and closing ANSI codes for `style`.
 * Taken from masylum's fork (https://github.com/masylum/log4js-node)
 */
function colorize(str, style) {
  const opening = colorizeStart(style);
  const closing = colorizeEnd(style);
  return opening + str + closing;
}
// Renders the standard "[time] [LEVEL] category - " prefix, optionally
// wrapped in the given colour's ANSI codes (no colour when `colour` is
// falsy).
function timestampLevelAndCategory(loggingEvent, colour) {
  return colorize(
    util.format(
      '[%s] [%s] %s - ',
      dateFormat.asString(loggingEvent.startTime),
      loggingEvent.level.toString(),
      loggingEvent.categoryName
    ),
    colour
  );
}
/**
 * BasicLayout is a simple layout for storing the logs. The logs are stored
 * in following format:
 * <pre>
 * [startTime] [logLevel] categoryName - message\n
 * </pre>
 *
 * @param {LoggingEvent} loggingEvent - the event to render
 * @returns {string} the formatted line (no trailing newline is added here)
 * @author Stephan Strittmatter
 */
function basicLayout(loggingEvent) {
  return (
    timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data)
  );
}
/**
 * colouredLayout - taken from masylum's fork.
 * same as basicLayout, but with colours.
 * The "[time] [LEVEL] category - " prefix is wrapped in the ANSI colour
 * assigned to the event's level; the message itself is not coloured.
 */
function colouredLayout(loggingEvent) {
  return (
    timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) +
    util.format(...loggingEvent.data)
  );
}
// Renders only the log data (util.format semantics) with no timestamp,
// level, or category prefix.
function messagePassThroughLayout(loggingEvent) {
  const { data } = loggingEvent;
  return util.format(...data);
}
// Returns the first data item untouched; useful when the appender performs
// its own formatting.
function dummyLayout(loggingEvent) {
  const [firstItem] = loggingEvent.data;
  return firstItem;
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* both padding and truncation can be negative.
* Negative truncation = trunc from end of string
* Positive truncation = trunc from start of string
* Negative padding = pad right
* Positive padding = pad left
*
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in constious formats
* - %% %
* - %n newline
* - %z pid
* - %f filename
* - %l line number
* - %o column postion
* - %s call stack
* - %C class name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %M method or function name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %A method or function alias [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %F fully qualified caller name [#1316](https://github.com/log4js-node/log4js-node/pull/1316)
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* - %X{<tokenname>} add dynamic tokens to your log. Tokens are specified in logger context
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { 'pid' : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @return {Function}
* @param pattern
* @param tokens
* @param timezoneOffset
*
* @authors ['Stephan Strittmatter', 'Jan Schmidle']
*/
function patternLayout(pattern, tokens) {
  // Default conversion pattern: start time, level, category, message, newline.
  const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n';
  // Matches either one conversion specifier
  // (%[padding][.truncation]<field>[{argument}]) or a run of literal text
  // up to the next '%'.
  const regex = /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflosCMAF%])(\{([^}]+)\})?|([^%]+)/;
  pattern = pattern || TTCC_CONVERSION_PATTERN;
  // %c: category name, optionally shortened to its last `specifier`
  // dot-separated segments (%c{2} on "a.b.c" -> "b.c").
  function categoryName(loggingEvent, specifier) {
    let loggerName = loggingEvent.categoryName;
    if (specifier) {
      const precision = parseInt(specifier, 10);
      const loggerNameBits = loggerName.split('.');
      if (precision < loggerNameBits.length) {
        loggerName = loggerNameBits
          .slice(loggerNameBits.length - precision)
          .join('.');
      }
    }
    return loggerName;
  }
  // %d: event start time; the specifier is either a named format alias
  // or a custom date-format string understood by the date-format package.
  function formatAsDate(loggingEvent, specifier) {
    let format = dateFormat.ISO8601_FORMAT;
    if (specifier) {
      format = specifier;
      // Pick up special cases
      switch (format) {
        case 'ISO8601':
        case 'ISO8601_FORMAT':
          format = dateFormat.ISO8601_FORMAT;
          break;
        case 'ISO8601_WITH_TZ_OFFSET':
        case 'ISO8601_WITH_TZ_OFFSET_FORMAT':
          format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
          break;
        case 'ABSOLUTE':
          process.emitWarning(
            'Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. ' +
              'Please use %d{ABSOLUTETIME} instead.',
            'DeprecationWarning',
            'log4js-node-DEP0003'
          );
          debug(
            '[log4js-node-DEP0003]',
            'DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.'
          );
        // falls through
        case 'ABSOLUTETIME':
        case 'ABSOLUTETIME_FORMAT':
          format = dateFormat.ABSOLUTETIME_FORMAT;
          break;
        case 'DATE':
          process.emitWarning(
            'Pattern %d{DATE} is deprecated due to the confusion it causes when used. ' +
              'Please use %d{DATETIME} instead.',
            'DeprecationWarning',
            'log4js-node-DEP0004'
          );
          debug(
            '[log4js-node-DEP0004]',
            'DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.'
          );
        // falls through
        case 'DATETIME':
        case 'DATETIME_FORMAT':
          format = dateFormat.DATETIME_FORMAT;
          break;
        // no default
      }
    }
    // Format the date
    return dateFormat.asString(format, loggingEvent.startTime);
  }
  // %h: machine hostname.
  function hostname() {
    return os.hostname().toString();
  }
  // %m: the log data, rendered with util.format semantics.
  function formatMessage(loggingEvent) {
    return util.format(...loggingEvent.data);
  }
  // %n: platform end-of-line.
  function endOfLine() {
    return os.EOL;
  }
  // %p: log level name.
  function logLevel(loggingEvent) {
    return loggingEvent.level.toString();
  }
  // %r: event start time as hh:mm:ss.
  function startTime(loggingEvent) {
    return dateFormat.asString('hh:mm:ss', loggingEvent.startTime);
  }
  // %[: opening ANSI colour code for the event's level.
  function startColour(loggingEvent) {
    return colorizeStart(loggingEvent.level.colour);
  }
  // %]: closing ANSI colour code for the event's level.
  function endColour(loggingEvent) {
    return colorizeEnd(loggingEvent.level.colour);
  }
  // %%: literal percent sign.
  function percent() {
    return '%';
  }
  // %z: pid carried by the event (e.g. from a clustered worker), else the
  // current process's pid.
  function pid(loggingEvent) {
    return loggingEvent && loggingEvent.pid
      ? loggingEvent.pid.toString()
      : process.pid.toString();
  }
  function clusterInfo() {
    // this used to try to return the master and worker pids,
    // but it would never have worked because master pid is not available to workers
    // leaving this here to maintain compatibility for patterns
    return pid();
  }
  // %x{token}: user-supplied token from the `tokens` option; function values
  // are invoked with the event, other values are used as-is.
  function userDefined(loggingEvent, specifier) {
    if (typeof tokens[specifier] !== 'undefined') {
      return typeof tokens[specifier] === 'function'
        ? tokens[specifier](loggingEvent)
        : tokens[specifier];
    }
    return null;
  }
  // %X{token}: like %x but resolved from the logger's context.
  function contextDefined(loggingEvent, specifier) {
    const resolver = loggingEvent.context[specifier];
    if (typeof resolver !== 'undefined') {
      return typeof resolver === 'function' ? resolver(loggingEvent) : resolver;
    }
    return null;
  }
  // %f: source file name (requires call-stack capture on the event); the
  // specifier limits output to that many trailing path segments.
  function fileName(loggingEvent, specifier) {
    let filename = loggingEvent.fileName || '';
    // support for ESM as it uses url instead of path for file
    /* istanbul ignore next: unsure how to simulate ESM for test coverage */
    const convertFileURLToPath = function(filepath) {
      const urlPrefix = 'file://';
      if (filepath.startsWith(urlPrefix)) {
        // https://nodejs.org/api/url.html#urlfileurltopathurl
        if (typeof url.fileURLToPath === 'function') {
          filepath = url.fileURLToPath(filepath);
        }
        // backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method)
        else {
          // posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt
          // win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt
          // win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt
          filepath = path.normalize(
            filepath.replace(new RegExp(`^${urlPrefix}`), '')
          );
          if (process.platform === 'win32') {
            if (filepath.startsWith('\\')) {
              filepath = filepath.slice(1);
            } else {
              filepath = path.sep + path.sep + filepath;
            }
          }
        }
      }
      return filepath;
    };
    filename = convertFileURLToPath(filename);
    if (specifier) {
      const fileDepth = parseInt(specifier, 10);
      const fileList = filename.split(path.sep);
      if (fileList.length > fileDepth) {
        filename = fileList.slice(-fileDepth).join(path.sep);
      }
    }
    return filename;
  }
  // %l: source line number, '' when unavailable.
  function lineNumber(loggingEvent) {
    return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : '';
  }
  // %o: source column number, '' when unavailable.
  function columnNumber(loggingEvent) {
    return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : '';
  }
  // %s: captured call stack, '' when unavailable.
  function callStack(loggingEvent) {
    return loggingEvent.callStack || '';
  }
  // %C: calling class name, '' when unavailable.
  function className(loggingEvent) {
    return loggingEvent.className || '';
  }
  // %M: calling function name, '' when unavailable.
  function functionName(loggingEvent) {
    return loggingEvent.functionName || '';
  }
  // %A: function alias ("[as name]" in the stack frame), '' when unavailable.
  function functionAlias(loggingEvent) {
    return loggingEvent.functionAlias || '';
  }
  // %F: fully qualified caller name, '' when unavailable.
  function callerName(loggingEvent) {
    return loggingEvent.callerName || '';
  }
  // Dispatch table: conversion character -> replacer function.
  const replacers = {
    c: categoryName,
    d: formatAsDate,
    h: hostname,
    m: formatMessage,
    n: endOfLine,
    p: logLevel,
    r: startTime,
    '[': startColour,
    ']': endColour,
    y: clusterInfo,
    z: pid,
    '%': percent,
    x: userDefined,
    X: contextDefined,
    f: fileName,
    l: lineNumber,
    o: columnNumber,
    s: callStack,
    C: className,
    M: functionName,
    A: functionAlias,
    F: callerName,
  };
  function replaceToken(conversionCharacter, loggingEvent, specifier) {
    return replacers[conversionCharacter](loggingEvent, specifier);
  }
  // Applies the ".n" truncation modifier; a negative n keeps the last n chars.
  function truncate(truncation, toTruncate) {
    let len;
    if (truncation) {
      len = parseInt(truncation.slice(1), 10);
      // negative truncate length means truncate from end of string
      return len > 0 ? toTruncate.slice(0, len) : toTruncate.slice(len);
    }
    return toTruncate;
  }
  // Applies the padding modifier; "-n" pads on the right, "n" on the left.
  function pad(padding, toPad) {
    let len;
    if (padding) {
      if (padding.charAt(0) === '-') {
        len = parseInt(padding.slice(1), 10);
        // Right pad with spaces
        while (toPad.length < len) {
          toPad += ' ';
        }
      } else {
        len = parseInt(padding, 10);
        // Left pad with spaces
        while (toPad.length < len) {
          toPad = ` ${toPad}`;
        }
      }
    }
    return toPad;
  }
  function truncateAndPad(toTruncAndPad, truncation, padding) {
    let replacement = toTruncAndPad;
    replacement = truncate(truncation, replacement);
    replacement = pad(padding, replacement);
    return replacement;
  }
  // The layout function: repeatedly consumes the pattern from the front,
  // copying literal text through and replacing each conversion specifier.
  return function(loggingEvent) {
    let formattedString = '';
    let result;
    let searchString = pattern;
    while ((result = regex.exec(searchString)) !== null) {
      // const matchedString = result[0];
      const padding = result[1];
      const truncation = result[2];
      const conversionCharacter = result[3];
      const specifier = result[5];
      const text = result[6];
      // Check if the pattern matched was just normal text
      if (text) {
        formattedString += text.toString();
      } else {
        // Create a raw replacement string based on the conversion
        // character and specifier
        const replacement = replaceToken(
          conversionCharacter,
          loggingEvent,
          specifier
        );
        formattedString += truncateAndPad(replacement, truncation, padding);
      }
      searchString = searchString.slice(result.index + result[0].length);
    }
    return formattedString;
  };
}
// Factories for the built-in layouts, keyed by the layout `type` used in
// configuration; each factory takes the layout config and returns a layout
// function (LoggingEvent -> string).
const layoutMakers = {
  messagePassThrough: () => messagePassThroughLayout,
  basic: () => basicLayout,
  colored: () => colouredLayout,
  coloured: () => colouredLayout,
  pattern: (config) =>
    patternLayout(config && config.pattern, config && config.tokens),
  dummy: () => dummyLayout,
};
module.exports = {
  basicLayout,
  messagePassThroughLayout,
  patternLayout,
  colouredLayout,
  coloredLayout: colouredLayout,
  dummyLayout,
  // Registers a custom layout generator under `name` so configuration can
  // reference it by type.
  addLayout(name, serializerGenerator) {
    layoutMakers[name] = serializerGenerator;
  },
  // Resolves a layout by type name and instantiates it with `config`;
  // returns undefined for unknown type names.
  layout(name, config) {
    return layoutMakers[name] && layoutMakers[name](config);
  },
};
| 1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/logger.js | /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require('debug')('log4js:logger');
const LoggingEvent = require('./LoggingEvent');
const levels = require('./levels');
const clustering = require('./clustering');
const categories = require('./categories');
const configuration = require('./configuration');
// Captures from a V8 stack-frame line:
//   1 = caller name ("Class.fn [as alias]"), 2 = file, 3 = line, 4 = column,
//   5 = fallback location for frames without "(file:line:col)".
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
// Parses an Error's stack to extract the calling site's location info.
// `skipIdx` skips the "Error:" header line plus log4js-internal frames.
// Returns null when the stack cannot be parsed.
function defaultParseCallStack(data, skipIdx = 4) {
  try {
    const stacklines = data.stack.split('\n').slice(skipIdx);
    const lineMatch = stackReg.exec(stacklines[0]);
    /* istanbul ignore else: failsafe */
    if (lineMatch && lineMatch.length === 6) {
      // extract class, function and alias names
      let className = '';
      let functionName = '';
      let functionAlias = '';
      if (lineMatch[1] && lineMatch[1] !== '') {
        // WARN: this will unset alias if alias is not present.
        [functionName, functionAlias] = lineMatch[1]
          .replace(/[[\]]/g, '')
          .split(' as ');
        functionAlias = functionAlias || '';
        // NOTE(review): with more than one '.', only the first two segments
        // are kept ("A.B.c" -> className "A", functionName "B") — confirm
        // this is the intended behaviour for nested caller names.
        if (functionName.includes('.'))
          [className, functionName] = functionName.split('.');
      }
      return {
        functionName,
        fileName: lineMatch[2],
        lineNumber: parseInt(lineMatch[3], 10),
        columnNumber: parseInt(lineMatch[4], 10),
        callStack: stacklines.join('\n'),
        className,
        functionAlias,
        callerName: lineMatch[1] || '',
      };
      // eslint-disable-next-line no-else-return
    } else {
      // will never get here unless nodejs has changes to Error
      console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
    }
  } catch (err) {
    // will never get error unless nodejs has breaking changes to Error
    console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
  }
  return null;
}
/**
 * Logger to log messages.
 * use {@see log4js#getLogger(String)} to get an instance.
 *
 * @name Logger
 * @namespace Log4js
 * @param name name of category to log to
 * @param level - the loglevel for the category
 * @param dispatch - the function which will receive the logevents
 *
 * @author Stephan Strittmatter
 */
class Logger {
  constructor(name) {
    if (!name) {
      throw new Error('No category provided.');
    }
    this.category = name;
    // per-logger key/value pairs, exposed to layouts via %X{key}
    this.context = {};
    this.parseCallStack = defaultParseCallStack;
    debug(`Logger created (${this.category}, ${this.level})`);
  }
  // The effective level is always read from the category store so that
  // configuration changes take effect immediately; defaults to OFF when the
  // category has no level configured.
  get level() {
    return levels.getLevel(
      categories.getLevelForCategory(this.category),
      levels.OFF
    );
  }
  set level(level) {
    categories.setLevelForCategory(
      this.category,
      levels.getLevel(level, this.level)
    );
  }
  // Whether file/line/function info is captured for each event (slower).
  get useCallStack() {
    return categories.getEnableCallStackForCategory(this.category);
  }
  set useCallStack(bool) {
    categories.setEnableCallStackForCategory(this.category, bool === true);
  }
  // Logs at `level`; when `level` is not a recognised level, the call falls
  // back to INFO (emitting a WARN first when the argument looked like an
  // intended-but-unknown level name).
  log(level, ...args) {
    const logLevel = levels.getLevel(level);
    if (!logLevel) {
      if (configuration.validIdentifier(level) && args.length > 0) {
        // logLevel not found but of valid signature, WARN before fallback to INFO
        this.log(
          levels.WARN,
          'log4js:logger.log: valid log-level not found as first parameter given:',
          level
        );
        this.log(levels.INFO, `[${level}]`, ...args);
      } else {
        // apart from fallback, allow .log(...args) to be synonym with .log("INFO", ...args)
        this.log(levels.INFO, level, ...args);
      }
    } else if (this.isLevelEnabled(logLevel)) {
      this._log(logLevel, args);
    }
  }
  isLevelEnabled(otherLevel) {
    return this.level.isLessThanOrEqualTo(otherLevel);
  }
  // Builds the LoggingEvent (with call-stack info when enabled) and hands it
  // to the clustering layer for dispatch to the appenders.
  _log(level, data) {
    debug(`sending log data (${level}) to appenders`);
    const loggingEvent = new LoggingEvent(
      this.category,
      level,
      data,
      this.context,
      this.useCallStack && this.parseCallStack(new Error())
    );
    clustering.send(loggingEvent);
  }
  addContext(key, value) {
    this.context[key] = value;
  }
  removeContext(key) {
    delete this.context[key];
  }
  clearContext() {
    this.context = {};
  }
  // Replaces the stack-parsing function (e.g. for custom frame skipping).
  setParseCallStackFunction(parseFunction) {
    this.parseCallStack = parseFunction;
  }
}
// Adds level convenience methods to Logger.prototype for `target`,
// e.g. INFO -> logger.info(...) and logger.isInfoEnabled(); snake_case
// level names become camelCase method names.
function addLevelMethods(target) {
  const level = levels.getLevel(target);
  const levelStrLower = level.toString().toLowerCase();
  const levelMethod = levelStrLower.replace(/_([a-z])/g, (g) =>
    g[1].toUpperCase()
  );
  const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
  Logger.prototype[`is${isLevelMethod}Enabled`] = function() {
    return this.isLevelEnabled(level);
  };
  Logger.prototype[levelMethod] = function(...args) {
    this.log(level, ...args);
  };
}
// install methods for the built-in levels now, and re-install whenever the
// configuration changes (custom levels may have been added)
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
  levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| /* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
const debug = require('debug')('log4js:logger');
const LoggingEvent = require('./LoggingEvent');
const levels = require('./levels');
const clustering = require('./clustering');
const categories = require('./categories');
const configuration = require('./configuration');
// Captures from a V8 stack-frame line:
//   1 = caller name ("Class.fn [as alias]"), 2 = file, 3 = line, 4 = column,
//   5 = fallback location for frames without "(file:line:col)".
const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
/**
 * Extracts location information (file, line, column, caller names) from an
 * Error's stack, skipping the first `skipIdx` lines (the "Error:" header
 * plus log4js-internal frames). Returns null when parsing fails.
 */
function defaultParseCallStack(data, skipIdx = 4) {
  try {
    const frames = data.stack.split('\n').slice(skipIdx);
    const match = stackReg.exec(frames[0]);
    /* istanbul ignore else: failsafe */
    if (match && match.length === 6) {
      let className = '';
      let functionName = '';
      let functionAlias = '';
      const caller = match[1];
      if (caller) {
        // "Class.method [as alias]" -> names; alias is '' when absent.
        const parts = caller.replace(/[[\]]/g, '').split(' as ');
        functionName = parts[0];
        functionAlias = parts[1] || '';
        if (functionName.includes('.')) {
          const dotted = functionName.split('.');
          className = dotted[0];
          functionName = dotted[1];
        }
      }
      return {
        fileName: match[2],
        lineNumber: parseInt(match[3], 10),
        columnNumber: parseInt(match[4], 10),
        callStack: frames.join('\n'),
        className,
        functionName,
        functionAlias,
        callerName: caller || '',
      };
      // eslint-disable-next-line no-else-return
    } else {
      // unreachable unless V8 changes its stack format
      console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
    }
  } catch (err) {
    // unreachable unless Error.prototype.stack changes shape
    console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
  }
  return null;
}
/**
 * Logger to log messages.
 * use {@see log4js#getLogger(String)} to get an instance.
 *
 * @name Logger
 * @namespace Log4js
 * @param name name of category to log to
 * @param level - the loglevel for the category
 * @param dispatch - the function which will receive the logevents
 *
 * @author Stephan Strittmatter
 */
class Logger {
  constructor(name) {
    if (!name) {
      throw new Error('No category provided.');
    }
    this.category = name;
    // per-logger key/value pairs, exposed to layouts via %X{key}
    this.context = {};
    this.parseCallStack = defaultParseCallStack;
    debug(`Logger created (${this.category}, ${this.level})`);
  }
  // The effective level is always read from the category store so that
  // configuration changes take effect immediately; defaults to OFF when the
  // category has no level configured.
  get level() {
    return levels.getLevel(
      categories.getLevelForCategory(this.category),
      levels.OFF
    );
  }
  set level(level) {
    categories.setLevelForCategory(
      this.category,
      levels.getLevel(level, this.level)
    );
  }
  // Whether file/line/function info is captured for each event (slower).
  get useCallStack() {
    return categories.getEnableCallStackForCategory(this.category);
  }
  set useCallStack(bool) {
    categories.setEnableCallStackForCategory(this.category, bool === true);
  }
  // Logs at `level`; when `level` is not a recognised level, the call falls
  // back to INFO (emitting a WARN first when the argument looked like an
  // intended-but-unknown level name).
  log(level, ...args) {
    const logLevel = levels.getLevel(level);
    if (!logLevel) {
      if (configuration.validIdentifier(level) && args.length > 0) {
        // logLevel not found but of valid signature, WARN before fallback to INFO
        this.log(
          levels.WARN,
          'log4js:logger.log: valid log-level not found as first parameter given:',
          level
        );
        this.log(levels.INFO, `[${level}]`, ...args);
      } else {
        // apart from fallback, allow .log(...args) to be synonym with .log("INFO", ...args)
        this.log(levels.INFO, level, ...args);
      }
    } else if (this.isLevelEnabled(logLevel)) {
      this._log(logLevel, args);
    }
  }
  isLevelEnabled(otherLevel) {
    return this.level.isLessThanOrEqualTo(otherLevel);
  }
  // Builds the LoggingEvent (with call-stack info when enabled) and hands it
  // to the clustering layer for dispatch to the appenders.
  _log(level, data) {
    debug(`sending log data (${level}) to appenders`);
    const loggingEvent = new LoggingEvent(
      this.category,
      level,
      data,
      this.context,
      this.useCallStack && this.parseCallStack(new Error())
    );
    clustering.send(loggingEvent);
  }
  addContext(key, value) {
    this.context[key] = value;
  }
  removeContext(key) {
    delete this.context[key];
  }
  clearContext() {
    this.context = {};
  }
  // Replaces the stack-parsing function (e.g. for custom frame skipping).
  setParseCallStackFunction(parseFunction) {
    this.parseCallStack = parseFunction;
  }
}
// Installs convenience methods on Logger.prototype for one level: the INFO
// level yields logger.info(...) and logger.isInfoEnabled(), and snake_case
// level names become camelCase method names.
function addLevelMethods(target) {
  const level = levels.getLevel(target);
  // "some_level" -> "someLevel"
  const methodName = level
    .toString()
    .toLowerCase()
    .replace(/_([a-z])/g, (match) => match[1].toUpperCase());
  const capitalised = methodName[0].toUpperCase() + methodName.slice(1);
  Logger.prototype[`is${capitalised}Enabled`] = function() {
    return this.isLevelEnabled(level);
  };
  Logger.prototype[methodName] = function(...args) {
    this.log(level, ...args);
  };
}
// Install methods for the built-in levels now, and re-install whenever the
// configuration changes (custom levels may have been added).
levels.levels.forEach(addLevelMethods);
configuration.addListener(() => {
  levels.levels.forEach(addLevelMethods);
});
module.exports = Logger;
| 1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/LoggingEvent-test.js | const flatted = require('flatted');
const { test } = require('tap');
const LoggingEvent = require('../../lib/LoggingEvent');
const levels = require('../../lib/levels');
test('LoggingEvent', (batch) => {
  // serialise() must encode values JSON cannot represent (NaN, +/-Infinity,
  // undefined) so they survive a flatted round-trip; here they appear as
  // their string placeholders on the wire.
  batch.test('should serialise to flatted', (t) => {
    const event = new LoggingEvent(
      'cheese',
      levels.DEBUG,
      ['log message', parseInt('abc', 10), 1 / 0, -1 / 0, undefined],
      {
        user: 'bob',
      }
    );
    // set the event date to a known value
    event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
    const rehydratedEvent = flatted.parse(event.serialise());
    t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
    t.equal(rehydratedEvent.categoryName, 'cheese');
    t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
    t.equal(rehydratedEvent.data.length, 5);
    t.equal(rehydratedEvent.data[0], 'log message');
    t.equal(rehydratedEvent.data[1], 'NaN');
    t.equal(rehydratedEvent.data[2], 'Infinity');
    t.equal(rehydratedEvent.data[3], '-Infinity');
    t.equal(rehydratedEvent.data[4], 'undefined');
    t.equal(rehydratedEvent.context.user, 'bob');
    t.end();
  });
  // deserialise() should rebuild a full LoggingEvent instance, including the
  // Date startTime, level object, and location fields.
  batch.test('should deserialise from flatted', (t) => {
    const dehydratedEvent = flatted.stringify({
      startTime: '2018-02-04T10:25:23.010Z',
      categoryName: 'biscuits',
      level: {
        levelStr: 'INFO',
      },
      data: ['some log message', { x: 1 }],
      context: { thing: 'otherThing' },
      pid: '1234',
      functionName: 'bound',
      fileName: 'domain.js',
      lineNumber: 421,
      columnNumber: 15,
      callStack: 'at bound (domain.js:421:15)\n',
    });
    const event = LoggingEvent.deserialise(dehydratedEvent);
    t.type(event, LoggingEvent);
    t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
    t.equal(event.categoryName, 'biscuits');
    t.same(event.level, levels.INFO);
    t.equal(event.data[0], 'some log message');
    t.equal(event.data[1].x, 1);
    t.equal(event.context.thing, 'otherThing');
    t.equal(event.pid, '1234');
    t.equal(event.functionName, 'bound');
    t.equal(event.fileName, 'domain.js');
    t.equal(event.lineNumber, 421);
    t.equal(event.columnNumber, 15);
    t.equal(event.callStack, 'at bound (domain.js:421:15)\n');
    t.end();
  });
  // Location info is optional: when supplied all fields are copied onto the
  // event; when omitted they stay undefined.
  batch.test('Should correct construct with/without location info', (t) => {
    // console.log([Error('123').stack.split('\n').slice(1).join('\n')])
    const callStack =
      '    at repl:1:14\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
    const fileName = '/log4js-node/test/tap/layouts-test.js';
    const lineNumber = 1;
    const columnNumber = 14;
    const className = '';
    const functionName = '';
    const functionAlias = '';
    const callerName = '';
    const location = {
      functionName,
      fileName,
      lineNumber,
      columnNumber,
      callStack,
      className,
      functionAlias,
      callerName,
    };
    const event = new LoggingEvent(
      'cheese',
      levels.DEBUG,
      ['log message'],
      { user: 'bob' },
      location
    );
    t.equal(event.functionName, functionName);
    t.equal(event.fileName, fileName);
    t.equal(event.lineNumber, lineNumber);
    t.equal(event.columnNumber, columnNumber);
    t.equal(event.callStack, callStack);
    t.equal(event.className, className);
    t.equal(event.functionAlias, functionAlias);
    t.equal(event.callerName, callerName);
    const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], {
      user: 'bob',
    });
    t.equal(event2.fileName, undefined);
    t.equal(event2.lineNumber, undefined);
    t.equal(event2.columnNumber, undefined);
    t.equal(event2.callStack, undefined);
    t.equal(event2.functionName, undefined);
    t.equal(event2.className, undefined);
    t.equal(event2.functionAlias, undefined);
    t.equal(event2.callerName, undefined);
    t.end();
  });
  // Covers frames of the form "Class.method [as alias]" producing separate
  // class, method, and alias fields plus the full caller name.
  batch.test('Should contain class, method and alias names', (t) => {
    // console.log([Error('123').stack.split('\n').slice(1).join('\n')])
    const callStack =
      '    at Foo.bar [as baz] (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
    const fileName = '/log4js-node/test/tap/layouts-test.js';
    const lineNumber = 1;
    const columnNumber = 14;
    const className = 'Foo';
    const functionName = 'bar';
    const functionAlias = 'baz';
    const callerName = 'Foo.bar [as baz]';
    const location = {
      functionName,
      fileName,
      lineNumber,
      columnNumber,
      callStack,
      className,
      functionAlias,
      callerName,
    };
    const event = new LoggingEvent(
      'cheese',
      levels.DEBUG,
      ['log message'],
      { user: 'bob' },
      location
    );
    t.equal(event.functionName, functionName);
    t.equal(event.fileName, fileName);
    t.equal(event.lineNumber, lineNumber);
    t.equal(event.columnNumber, columnNumber);
    t.equal(event.callStack, callStack);
    t.equal(event.className, className);
    t.equal(event.functionAlias, functionAlias);
    t.equal(event.callerName, callerName);
    t.end();
  });
  batch.end();
});
| const flatted = require('flatted');
const { test } = require('tap');
const LoggingEvent = require('../../lib/LoggingEvent');
const levels = require('../../lib/levels');
test('LoggingEvent', (batch) => {
batch.test('should serialise to flatted', (t) => {
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message', parseInt('abc', 10), 1 / 0, -1 / 0, undefined],
{
user: 'bob',
}
);
// set the event date to a known value
event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
const rehydratedEvent = flatted.parse(event.serialise());
t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
t.equal(rehydratedEvent.categoryName, 'cheese');
t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
t.equal(rehydratedEvent.data.length, 5);
t.equal(rehydratedEvent.data[0], 'log message');
t.equal(rehydratedEvent.data[1], 'NaN');
t.equal(rehydratedEvent.data[2], 'Infinity');
t.equal(rehydratedEvent.data[3], '-Infinity');
t.equal(rehydratedEvent.data[4], 'undefined');
t.equal(rehydratedEvent.context.user, 'bob');
t.end();
});
batch.test('should deserialise from flatted', (t) => {
const dehydratedEvent = flatted.stringify({
startTime: '2018-02-04T10:25:23.010Z',
categoryName: 'biscuits',
level: {
levelStr: 'INFO',
},
data: ['some log message', { x: 1 }],
context: { thing: 'otherThing' },
pid: '1234',
functionName: 'bound',
fileName: 'domain.js',
lineNumber: 421,
columnNumber: 15,
callStack: 'at bound (domain.js:421:15)\n',
});
const event = LoggingEvent.deserialise(dehydratedEvent);
t.type(event, LoggingEvent);
t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
t.equal(event.categoryName, 'biscuits');
t.same(event.level, levels.INFO);
t.equal(event.data[0], 'some log message');
t.equal(event.data[1].x, 1);
t.equal(event.context.thing, 'otherThing');
t.equal(event.pid, '1234');
t.equal(event.functionName, 'bound');
t.equal(event.fileName, 'domain.js');
t.equal(event.lineNumber, 421);
t.equal(event.columnNumber, 15);
t.equal(event.callStack, 'at bound (domain.js:421:15)\n');
t.end();
});
batch.test('Should correct construct with/without location info', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = '';
const functionName = '';
const functionAlias = '';
const callerName = '';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], {
user: 'bob',
});
t.equal(event2.fileName, undefined);
t.equal(event2.lineNumber, undefined);
t.equal(event2.columnNumber, undefined);
t.equal(event2.callStack, undefined);
t.equal(event2.className, undefined);
t.equal(event2.functionName, undefined);
t.equal(event2.functionAlias, undefined);
t.equal(event2.callerName, undefined);
t.end();
});
batch.test('Should contain class, method and alias names', (t) => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = '/log4js-node/test/tap/layouts-test.js';
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const location = {
fileName,
lineNumber,
columnNumber,
callStack,
className,
functionName,
functionAlias,
callerName,
};
const event = new LoggingEvent(
'cheese',
levels.DEBUG,
['log message'],
{ user: 'bob' },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);
t.equal(event.className, className);
t.equal(event.functionName, functionName);
t.equal(event.functionAlias, functionAlias);
t.equal(event.callerName, callerName);
t.end();
});
batch.end();
});
| 1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/server-test.js | const { test } = require('tap');
const net = require('net');
const log4js = require('../../lib/log4js');
const vcr = require('../../lib/appenders/recording');
const levels = require('../../lib/levels');
const LoggingEvent = require('../../lib/LoggingEvent');
test('TCP Server', (batch) => {
batch.test(
'should listen for TCP messages and re-send via process.send',
(t) => {
log4js.configure({
appenders: {
vcr: { type: 'recording' },
tcp: { type: 'tcp-server', port: 5678 },
},
categories: {
default: { appenders: ['vcr'], level: 'debug' },
},
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${new LoggingEvent(
'test-category',
levels.INFO,
['something'],
{}
).serialise()}__LOG4JS__${new LoggingEvent(
'test-category',
levels.INFO,
['something else'],
{}
).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ['something'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {},
});
t.match(logs[1], {
data: ['something else'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {},
});
t.match(logs[2], {
data: [
'Unable to parse log:',
'some nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[3], {
data: [
'Unable to parse log:',
'{"some":"json"}',
'because: ',
TypeError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.end();
});
}, 100);
}
);
});
socket.unref();
}, 100);
}
);
batch.test('sending incomplete messages in chunks', (t) => {
log4js.configure({
appenders: {
vcr: { type: 'recording' },
tcp: { type: 'tcp-server' },
},
categories: {
default: { appenders: ['vcr'], level: 'debug' },
},
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5000, () => {
const syncWrite = (dataArray, finalCallback) => {
if (!Array.isArray(dataArray)) {
dataArray = [dataArray];
}
if (typeof finalCallback !== 'function') {
finalCallback = () => {};
}
setTimeout(() => {
if (!dataArray.length) {
finalCallback();
} else if (dataArray.length === 1) {
socket.write(dataArray.shift(), finalCallback);
} else {
socket.write(dataArray.shift(), () => {
syncWrite(dataArray, finalCallback);
});
}
}, 100);
};
const dataArray = [
'__LOG4JS__',
'Hello__LOG4JS__World',
'__LOG4JS__',
'testing nonsense',
`__LOG4JS__more nonsense__LOG4JS__`,
];
const finalCallback = () => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 8);
t.match(logs[4], {
data: [
'Unable to parse log:',
'Hello',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[5], {
data: [
'Unable to parse log:',
'World',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[6], {
data: [
'Unable to parse log:',
'testing nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[7], {
data: [
'Unable to parse log:',
'more nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.end();
});
}, 100);
};
syncWrite(dataArray, finalCallback);
});
socket.unref();
}, 100);
});
batch.end();
});
| const { test } = require('tap');
const net = require('net');
const log4js = require('../../lib/log4js');
const vcr = require('../../lib/appenders/recording');
const levels = require('../../lib/levels');
const LoggingEvent = require('../../lib/LoggingEvent');
test('TCP Server', (batch) => {
batch.test(
'should listen for TCP messages and re-send via process.send',
(t) => {
log4js.configure({
appenders: {
vcr: { type: 'recording' },
tcp: { type: 'tcp-server', port: 5678 },
},
categories: {
default: { appenders: ['vcr'], level: 'debug' },
},
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${new LoggingEvent(
'test-category',
levels.INFO,
['something'],
{}
).serialise()}__LOG4JS__${new LoggingEvent(
'test-category',
levels.INFO,
['something else'],
{}
).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ['something'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {},
});
t.match(logs[1], {
data: ['something else'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {},
});
t.match(logs[2], {
data: [
'Unable to parse log:',
'some nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[3], {
data: [
'Unable to parse log:',
'{"some":"json"}',
'because: ',
TypeError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.end();
});
}, 100);
}
);
});
socket.unref();
}, 100);
}
);
batch.test('sending incomplete messages in chunks', (t) => {
log4js.configure({
appenders: {
vcr: { type: 'recording' },
tcp: { type: 'tcp-server' },
},
categories: {
default: { appenders: ['vcr'], level: 'debug' },
},
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5000, () => {
const syncWrite = (dataArray, finalCallback) => {
if (!Array.isArray(dataArray)) {
dataArray = [dataArray];
}
if (typeof finalCallback !== 'function') {
finalCallback = () => {};
}
setTimeout(() => {
if (!dataArray.length) {
finalCallback();
} else if (dataArray.length === 1) {
socket.write(dataArray.shift(), finalCallback);
} else {
socket.write(dataArray.shift(), () => {
syncWrite(dataArray, finalCallback);
});
}
}, 100);
};
const dataArray = [
'__LOG4JS__',
'Hello__LOG4JS__World',
'__LOG4JS__',
'testing nonsense',
`__LOG4JS__more nonsense__LOG4JS__`,
];
const finalCallback = () => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 8);
t.match(logs[4], {
data: [
'Unable to parse log:',
'Hello',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[5], {
data: [
'Unable to parse log:',
'World',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[6], {
data: [
'Unable to parse log:',
'testing nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.match(logs[7], {
data: [
'Unable to parse log:',
'more nonsense',
'because: ',
SyntaxError,
],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {},
});
t.end();
});
}, 100);
};
syncWrite(dataArray, finalCallback);
});
socket.unref();
}, 100);
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/layouts-test.js | const { test } = require('tap');
const debug = require('debug');
const os = require('os');
const path = require('path');
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test('log4js layouts', (batch) => {
batch.test('colouredLayout', (t) => {
const layout = require('../../lib/layouts').colouredLayout;
t.test('should apply level colour codes to output', (assert) => {
const output = layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
);
assert.end();
});
t.test(
'should support the console.log format for the message',
(assert) => {
const output = layout({
data: ['thing %d', 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'
);
assert.end();
}
);
t.end();
});
batch.test('messagePassThroughLayout', (t) => {
const layout = require('../../lib/layouts').messagePassThroughLayout;
t.equal(
layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'nonsense',
'should take a logevent and output only the message'
);
t.equal(
layout({
data: ['thing %d', 1, 'cheese'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'thing 1 cheese',
'should support the console.log format for the message'
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'{ thing: 1 }',
'should output the first item even if it is not a string'
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
/at (Test\.batch\.test(\.t)?|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
'regexp did not return a match - should print the stacks of a passed error objects'
);
t.test('with passed augmented errors', (assert) => {
const e = new Error('My Unique Error Message');
e.augmented = 'My Unique attribute value';
e.augObj = { at1: 'at2' };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
'should print the contained error message'
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
'should print error augmented string attributes'
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
'should print error augmented object attributes'
);
assert.end();
});
t.end();
});
batch.test('basicLayout', (t) => {
const layout = require('../../lib/layouts').basicLayout;
const event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'tests',
level: {
toString() {
return 'DEBUG';
},
},
};
t.equal(
layout(event),
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test'
);
t.test(
'should output a stacktrace, message if the event has an error attached',
(assert) => {
let i;
const error = new Error('Some made-up error');
const stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
'should output any extra data in the log event as util.inspect strings',
(assert) => {
event.data = [
'this is a test',
{
name: 'Cheese',
message: 'Gorgonzola smells.',
},
];
const output = layout(event);
assert.equal(
output,
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test('dummyLayout', (t) => {
const layout = require('../../lib/layouts').dummyLayout;
t.test('should output just the first element of the log data', (assert) => {
const event = {
data: ['this is the first value', 'this is not'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
};
assert.equal(layout(event), 'this is the first value');
assert.end();
});
t.end();
});
batch.test('patternLayout', (t) => {
const originalListener = process.listeners('warning')[
process.listeners('warning').length - 1
];
const warningListener = (error) => {
if (error.name === 'DeprecationWarning') {
if (
error.code.startsWith('log4js-node-DEP0003') ||
error.code.startsWith('log4js-node-DEP0004')
) {
return;
}
}
originalListener(error);
};
process.off('warning', originalListener);
process.on('warning', warningListener);
const debugWasEnabled = debug.enabled('log4js:layouts');
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off('warning', warningListener);
process.on('warning', originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: 'testStringToken',
testFunction() {
return 'testFunctionToken';
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
},
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = path.normalize('/log4js-node/test/tap/layouts-test.js');
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const event = {
data: ['this is a test'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber,
className,
functionName,
functionAlias,
callerName,
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require('../../lib/layouts').patternLayout;
t.test(
'should default to "time logLevel loggerName - message"',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
}
);
t.test('%r should output time only', (assert) => {
testPattern(assert, layout, event, tokens, '%r', '14:18:30');
assert.end();
});
t.test('%p should output the log level', (assert) => {
testPattern(assert, layout, event, tokens, '%p', 'DEBUG');
assert.end();
});
t.test('%c should output the log category', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%c',
'multiple.levels.of.tests'
);
assert.end();
});
t.test('%m should output the log data', (assert) => {
testPattern(assert, layout, event, tokens, '%m', 'this is a test');
assert.end();
});
t.test('%n should output a new line', (assert) => {
testPattern(assert, layout, event, tokens, '%n', EOL);
assert.end();
});
t.test('%h should output hostname', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%h',
os.hostname().toString()
);
assert.end();
});
t.test('%z should output pid', (assert) => {
testPattern(assert, layout, event, tokens, '%z', process.pid.toString());
assert.end();
});
t.test('%z should pick up pid from log event if present', (assert) => {
event.pid = '1234';
testPattern(assert, layout, event, tokens, '%z', '1234');
delete event.pid;
assert.end();
});
t.test('%y should output pid (was cluster info)', (assert) => {
testPattern(assert, layout, event, tokens, '%y', process.pid.toString());
assert.end();
});
t.test(
'%c should handle category names like java-style package names',
(assert) => {
testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
testPattern(
assert,
layout,
event,
tokens,
'%c{4}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{5}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{99}',
'multiple.levels.of.tests'
);
assert.end();
}
);
t.test('%d should output the date in ISO8601 format', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d',
'2010-12-05T14:18:30.045'
);
assert.end();
});
t.test('%d should allow for format specification', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601}',
'2010-12-05T14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601_WITH_TZ_OFFSET}',
'2010-12-05T14:18:30.045+10:00'
);
const DEP0003 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0003') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTE}', // deprecated
'14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0003') > -1).length,
DEP0003 + 1,
'deprecation log4js-node-DEP0003 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTETIME}',
'14:18:30.045'
);
const DEP0004 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0004') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{DATE}', // deprecated
'05 12 2010 14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0004') > -1).length,
DEP0004 + 1,
'deprecation log4js-node-DEP0004 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{DATETIME}',
'05 12 2010 14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yy MM dd hh mm ss}',
'10 12 05 14 18 30'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd}',
'2010 12 05'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd hh mm ss SSS}',
'2010 12 05 14 18 30 045'
);
assert.end();
});
t.test('%% should output %', (assert) => {
testPattern(assert, layout, event, tokens, '%%', '%');
assert.end();
});
t.test('%f should output filename', (assert) => {
testPattern(assert, layout, event, tokens, '%f', fileName);
assert.end();
});
t.test('%f should handle filename depth', (assert) => {
testPattern(assert, layout, event, tokens, '%f{1}', 'layouts-test.js');
testPattern(
assert,
layout,
event,
tokens,
'%f{2}',
path.join('tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{3}',
path.join('test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{4}',
path.join('log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{5}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{99}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
assert.end();
});
t.test('%f should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%.5f', fileName.slice(0, 5));
testPattern(
assert,
layout,
event,
tokens,
'%20f{1}',
' layouts-test.js'
);
testPattern(
assert,
layout,
event,
tokens,
'%30.30f{2}',
` ${path.join('tap', 'layouts-test.js')}`
);
testPattern(assert, layout, event, tokens, '%10.-5f{1}', ' st.js');
assert.end();
});
t.test('%l should output line number', (assert) => {
testPattern(assert, layout, event, tokens, '%l', lineNumber.toString());
assert.end();
});
t.test('%l should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10l', ' 1');
testPattern(assert, layout, event, tokens, '%.5l', '1');
testPattern(assert, layout, event, tokens, '%.-5l', '1');
testPattern(assert, layout, event, tokens, '%-5l', '1 ');
assert.end();
});
t.test('%o should output column postion', (assert) => {
testPattern(assert, layout, event, tokens, '%o', columnNumber.toString());
assert.end();
});
t.test('%o should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10o', ' 14');
testPattern(assert, layout, event, tokens, '%.5o', '14');
testPattern(assert, layout, event, tokens, '%.1o', '1');
testPattern(assert, layout, event, tokens, '%.-1o', '4');
testPattern(assert, layout, event, tokens, '%-5o', '14 ');
assert.end();
});
t.test('%s should output stack', (assert) => {
testPattern(assert, layout, event, tokens, '%s', callStack);
assert.end();
});
t.test(
'%f should output empty string when fileName not exist',
(assert) => {
delete event.fileName;
testPattern(assert, layout, event, tokens, '%f', '');
assert.end();
}
);
t.test(
'%l should output empty string when lineNumber not exist',
(assert) => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, '%l', '');
assert.end();
}
);
t.test(
'%o should output empty string when columnNumber not exist',
(assert) => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, '%o', '');
assert.end();
}
);
t.test(
'%s should output empty string when callStack not exist',
(assert) => {
delete event.callStack;
testPattern(assert, layout, event, tokens, '%s', '');
assert.end();
}
);
t.test('should output anything not preceded by % as literal', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'blah blah blah',
'blah blah blah'
);
assert.end();
});
t.test(
'should output the original string if no replacer matches the token',
(assert) => {
testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
assert.end();
}
);
t.test('should handle complicated patterns', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n',
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test('should truncate fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%.4m', 'this');
testPattern(assert, layout, event, tokens, '%.7m', 'this is');
testPattern(assert, layout, event, tokens, '%.9m', 'this is a');
testPattern(assert, layout, event, tokens, '%.14m', 'this is a test');
testPattern(
assert,
layout,
event,
tokens,
'%.2919102m',
'this is a test'
);
testPattern(assert, layout, event, tokens, '%.-4m', 'test');
assert.end();
});
t.test('should pad fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%10p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%8p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%6p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-6p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-8p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-10p', 'DEBUG ');
assert.end();
});
t.test('%[%r%] should output colored time', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%[%r%]',
'\x1B[36m14:18:30\x1B[39m'
);
assert.end();
});
t.test(
'%x{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%x{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%x{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%x{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%x should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, tokens, '%x', 'null');
assert.end();
});
t.test(
'%X{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%X{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%X{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, {}, '%X{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%X{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%X should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, {}, '%X', 'null');
assert.end();
});
t.test('%M should output function name', (assert) => {
testPattern(assert, layout, event, tokens, '%M', functionName);
assert.end();
});
t.test(
'%M should output empty string when functionName not exist',
(assert) => {
delete event.functionName;
testPattern(assert, layout, event, tokens, '%M', '');
assert.end();
}
);
t.test('%C should output class name', (assert) => {
testPattern(assert, layout, event, tokens, '%C', className);
assert.end();
});
t.test(
'%C should output empty string when className not exist',
(assert) => {
delete event.className;
testPattern(assert, layout, event, tokens, '%C', '');
assert.end();
}
);
t.test('%A should output function alias', (assert) => {
testPattern(assert, layout, event, tokens, '%A', functionAlias);
assert.end();
});
t.test(
'%A should output empty string when functionAlias not exist',
(assert) => {
delete event.functionAlias;
testPattern(assert, layout, event, tokens, '%A', '');
assert.end();
}
);
t.test('%F should output fully qualified caller name', (assert) => {
testPattern(assert, layout, event, tokens, '%F', callerName);
assert.end();
});
t.test(
'%F should output empty string when callerName not exist',
(assert) => {
delete event.callerName;
testPattern(assert, layout, event, tokens, '%F', '');
assert.end();
}
);
t.end();
});
batch.test('layout makers', (t) => {
const layouts = require('../../lib/layouts');
t.test('should have a maker for each layout', (assert) => {
assert.ok(layouts.layout('messagePassThrough'));
assert.ok(layouts.layout('basic'));
assert.ok(layouts.layout('colored'));
assert.ok(layouts.layout('coloured'));
assert.ok(layouts.layout('pattern'));
assert.ok(layouts.layout('dummy'));
assert.end();
});
t.test(
'layout pattern maker should pass pattern and tokens to layout from config',
(assert) => {
let layout = layouts.layout('pattern', { pattern: '%%' });
assert.equal(layout({}), '%');
layout = layouts.layout('pattern', {
pattern: '%x{testStringToken}',
tokens: { testStringToken: 'cheese' },
});
assert.equal(layout({}), 'cheese');
assert.end();
}
);
t.end();
});
batch.test('add layout', (t) => {
const layouts = require('../../lib/layouts');
t.test('should be able to add a layout', (assert) => {
layouts.addLayout('test_layout', (config) => {
assert.equal(config, 'test_config');
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout('test_layout', 'test_config');
assert.ok(serializer);
assert.equal(serializer({ data: 'INPUT' }), 'TEST LAYOUT >INPUT');
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require('tap');
const debug = require('debug');
const os = require('os');
const path = require('path');
const { EOL } = os;
// used for patternLayout tests.
function testPattern(assert, layout, event, tokens, pattern, value) {
assert.equal(layout(pattern, tokens)(event), value);
}
test('log4js layouts', (batch) => {
batch.test('colouredLayout', (t) => {
const layout = require('../../lib/layouts').colouredLayout;
t.test('should apply level colour codes to output', (assert) => {
const output = layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
);
assert.end();
});
t.test(
'should support the console.log format for the message',
(assert) => {
const output = layout({
data: ['thing %d', 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
toString() {
return 'ERROR';
},
colour: 'red',
},
});
assert.equal(
output,
'\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'
);
assert.end();
}
);
t.end();
});
batch.test('messagePassThroughLayout', (t) => {
const layout = require('../../lib/layouts').messagePassThroughLayout;
t.equal(
layout({
data: ['nonsense'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'nonsense',
'should take a logevent and output only the message'
);
t.equal(
layout({
data: ['thing %d', 1, 'cheese'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'thing 1 cheese',
'should support the console.log format for the message'
);
t.equal(
layout({
data: [{ thing: 1 }],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
'{ thing: 1 }',
'should output the first item even if it is not a string'
);
t.match(
layout({
data: [new Error()],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
}),
/at (Test\.batch\.test(\.t)?|Test\.<anonymous>)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
'regexp did not return a match - should print the stacks of a passed error objects'
);
t.test('with passed augmented errors', (assert) => {
const e = new Error('My Unique Error Message');
e.augmented = 'My Unique attribute value';
e.augObj = { at1: 'at2' };
const layoutOutput = layout({
data: [e],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'cheese',
level: {
colour: 'green',
toString() {
return 'ERROR';
},
},
});
assert.match(
layoutOutput,
/Error: My Unique Error Message/,
'should print the contained error message'
);
assert.match(
layoutOutput,
/augmented:\s'My Unique attribute value'/,
'should print error augmented string attributes'
);
assert.match(
layoutOutput,
/augObj:\s\{ at1: 'at2' \}/,
'should print error augmented object attributes'
);
assert.end();
});
t.end();
});
batch.test('basicLayout', (t) => {
const layout = require('../../lib/layouts').basicLayout;
const event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: 'tests',
level: {
toString() {
return 'DEBUG';
},
},
};
t.equal(
layout(event),
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test'
);
t.test(
'should output a stacktrace, message if the event has an error attached',
(assert) => {
let i;
const error = new Error('Some made-up error');
const stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
const output = layout(event);
const lines = output.split(/\n/);
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
}
assert.end();
}
);
t.test(
'should output any extra data in the log event as util.inspect strings',
(assert) => {
event.data = [
'this is a test',
{
name: 'Cheese',
message: 'Gorgonzola smells.',
},
];
const output = layout(event);
assert.equal(
output,
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
}
);
t.end();
});
batch.test('dummyLayout', (t) => {
const layout = require('../../lib/layouts').dummyLayout;
t.test('should output just the first element of the log data', (assert) => {
const event = {
data: ['this is the first value', 'this is not'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
};
assert.equal(layout(event), 'this is the first value');
assert.end();
});
t.end();
});
batch.test('patternLayout', (t) => {
const originalListener = process.listeners('warning')[
process.listeners('warning').length - 1
];
const warningListener = (error) => {
if (error.name === 'DeprecationWarning') {
if (
error.code.startsWith('log4js-node-DEP0003') ||
error.code.startsWith('log4js-node-DEP0004')
) {
return;
}
}
originalListener(error);
};
process.off('warning', originalListener);
process.on('warning', warningListener);
const debugWasEnabled = debug.enabled('log4js:layouts');
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
const originalNamespace = debug.disable();
debug.enable(`${originalNamespace}, log4js:layouts`);
batch.teardown(async () => {
// next event loop so that past warnings will not be printed
setImmediate(() => {
process.off('warning', warningListener);
process.on('warning', originalListener);
});
process.stderr.write = originalWrite;
debug.enable(originalNamespace);
});
const tokens = {
testString: 'testStringToken',
testFunction() {
return 'testFunctionToken';
},
fnThatUsesLogEvent(logEvent) {
return logEvent.level.toString();
},
};
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
const callStack =
' at Foo.bar [as baz] (repl:1:14)\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
const fileName = path.normalize('/log4js-node/test/tap/layouts-test.js');
const lineNumber = 1;
const columnNumber = 14;
const className = 'Foo';
const functionName = 'bar';
const functionAlias = 'baz';
const callerName = 'Foo.bar [as baz]';
const event = {
data: ['this is a test'],
startTime: new Date('2010-12-05 14:18:30.045'),
categoryName: 'multiple.levels.of.tests',
level: {
toString() {
return 'DEBUG';
},
colour: 'cyan',
},
context: tokens,
// location
callStack,
fileName,
lineNumber,
columnNumber,
className,
functionName,
functionAlias,
callerName,
};
event.startTime.getTimezoneOffset = () => -600;
const layout = require('../../lib/layouts').patternLayout;
t.test(
'should default to "time logLevel loggerName - message"',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
null,
`14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
);
assert.end();
}
);
t.test('%r should output time only', (assert) => {
testPattern(assert, layout, event, tokens, '%r', '14:18:30');
assert.end();
});
t.test('%p should output the log level', (assert) => {
testPattern(assert, layout, event, tokens, '%p', 'DEBUG');
assert.end();
});
t.test('%c should output the log category', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%c',
'multiple.levels.of.tests'
);
assert.end();
});
t.test('%m should output the log data', (assert) => {
testPattern(assert, layout, event, tokens, '%m', 'this is a test');
assert.end();
});
t.test('%n should output a new line', (assert) => {
testPattern(assert, layout, event, tokens, '%n', EOL);
assert.end();
});
t.test('%h should output hostname', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%h',
os.hostname().toString()
);
assert.end();
});
t.test('%z should output pid', (assert) => {
testPattern(assert, layout, event, tokens, '%z', process.pid.toString());
assert.end();
});
t.test('%z should pick up pid from log event if present', (assert) => {
event.pid = '1234';
testPattern(assert, layout, event, tokens, '%z', '1234');
delete event.pid;
assert.end();
});
t.test('%y should output pid (was cluster info)', (assert) => {
testPattern(assert, layout, event, tokens, '%y', process.pid.toString());
assert.end();
});
t.test(
'%c should handle category names like java-style package names',
(assert) => {
testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
testPattern(
assert,
layout,
event,
tokens,
'%c{4}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{5}',
'multiple.levels.of.tests'
);
testPattern(
assert,
layout,
event,
tokens,
'%c{99}',
'multiple.levels.of.tests'
);
assert.end();
}
);
t.test('%d should output the date in ISO8601 format', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d',
'2010-12-05T14:18:30.045'
);
assert.end();
});
t.test('%d should allow for format specification', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601}',
'2010-12-05T14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ISO8601_WITH_TZ_OFFSET}',
'2010-12-05T14:18:30.045+10:00'
);
const DEP0003 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0003') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTE}', // deprecated
'14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0003') > -1).length,
DEP0003 + 1,
'deprecation log4js-node-DEP0003 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{ABSOLUTETIME}',
'14:18:30.045'
);
const DEP0004 = debugLogs.filter(
(e) => e.indexOf('log4js-node-DEP0004') > -1
).length;
testPattern(
assert,
layout,
event,
tokens,
'%d{DATE}', // deprecated
'05 12 2010 14:18:30.045'
);
assert.equal(
debugLogs.filter((e) => e.indexOf('log4js-node-DEP0004') > -1).length,
DEP0004 + 1,
'deprecation log4js-node-DEP0004 emitted'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{DATETIME}',
'05 12 2010 14:18:30.045'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yy MM dd hh mm ss}',
'10 12 05 14 18 30'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd}',
'2010 12 05'
);
testPattern(
assert,
layout,
event,
tokens,
'%d{yyyy MM dd hh mm ss SSS}',
'2010 12 05 14 18 30 045'
);
assert.end();
});
t.test('%% should output %', (assert) => {
testPattern(assert, layout, event, tokens, '%%', '%');
assert.end();
});
t.test('%f should output filename', (assert) => {
testPattern(assert, layout, event, tokens, '%f', fileName);
assert.end();
});
t.test('%f should handle filename depth', (assert) => {
testPattern(assert, layout, event, tokens, '%f{1}', 'layouts-test.js');
testPattern(
assert,
layout,
event,
tokens,
'%f{2}',
path.join('tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{3}',
path.join('test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{4}',
path.join('log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{5}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
testPattern(
assert,
layout,
event,
tokens,
'%f{99}',
path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
);
assert.end();
});
t.test('%f should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%.5f', fileName.slice(0, 5));
testPattern(
assert,
layout,
event,
tokens,
'%20f{1}',
' layouts-test.js'
);
testPattern(
assert,
layout,
event,
tokens,
'%30.30f{2}',
` ${path.join('tap', 'layouts-test.js')}`
);
testPattern(assert, layout, event, tokens, '%10.-5f{1}', ' st.js');
assert.end();
});
t.test('%l should output line number', (assert) => {
testPattern(assert, layout, event, tokens, '%l', lineNumber.toString());
assert.end();
});
t.test('%l should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10l', ' 1');
testPattern(assert, layout, event, tokens, '%.5l', '1');
testPattern(assert, layout, event, tokens, '%.-5l', '1');
testPattern(assert, layout, event, tokens, '%-5l', '1 ');
assert.end();
});
t.test('%o should output column postion', (assert) => {
testPattern(assert, layout, event, tokens, '%o', columnNumber.toString());
assert.end();
});
t.test('%o should accept truncation and padding', (assert) => {
testPattern(assert, layout, event, tokens, '%5.10o', ' 14');
testPattern(assert, layout, event, tokens, '%.5o', '14');
testPattern(assert, layout, event, tokens, '%.1o', '1');
testPattern(assert, layout, event, tokens, '%.-1o', '4');
testPattern(assert, layout, event, tokens, '%-5o', '14 ');
assert.end();
});
t.test('%s should output stack', (assert) => {
testPattern(assert, layout, event, tokens, '%s', callStack);
assert.end();
});
t.test(
'%f should output empty string when fileName not exist',
(assert) => {
delete event.fileName;
testPattern(assert, layout, event, tokens, '%f', '');
assert.end();
}
);
t.test(
'%l should output empty string when lineNumber not exist',
(assert) => {
delete event.lineNumber;
testPattern(assert, layout, event, tokens, '%l', '');
assert.end();
}
);
t.test(
'%o should output empty string when columnNumber not exist',
(assert) => {
delete event.columnNumber;
testPattern(assert, layout, event, tokens, '%o', '');
assert.end();
}
);
t.test(
'%s should output empty string when callStack not exist',
(assert) => {
delete event.callStack;
testPattern(assert, layout, event, tokens, '%s', '');
assert.end();
}
);
t.test('should output anything not preceded by % as literal', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'blah blah blah',
'blah blah blah'
);
assert.end();
});
t.test(
'should output the original string if no replacer matches the token',
(assert) => {
testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
assert.end();
}
);
t.test('should handle complicated patterns', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', // deprecated
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
testPattern(
assert,
layout,
event,
tokens,
'%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n',
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
assert.end();
});
t.test('should truncate fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%.4m', 'this');
testPattern(assert, layout, event, tokens, '%.7m', 'this is');
testPattern(assert, layout, event, tokens, '%.9m', 'this is a');
testPattern(assert, layout, event, tokens, '%.14m', 'this is a test');
testPattern(
assert,
layout,
event,
tokens,
'%.2919102m',
'this is a test'
);
testPattern(assert, layout, event, tokens, '%.-4m', 'test');
assert.end();
});
t.test('should pad fields if specified', (assert) => {
testPattern(assert, layout, event, tokens, '%10p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%8p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%6p', ' DEBUG');
testPattern(assert, layout, event, tokens, '%4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-4p', 'DEBUG');
testPattern(assert, layout, event, tokens, '%-6p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-8p', 'DEBUG ');
testPattern(assert, layout, event, tokens, '%-10p', 'DEBUG ');
assert.end();
});
t.test('%[%r%] should output colored time', (assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%[%r%]',
'\x1B[36m14:18:30\x1B[39m'
);
assert.end();
});
t.test(
'%x{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%x{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%x{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%x{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
tokens,
'%x{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%x should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, tokens, '%x', 'null');
assert.end();
});
t.test(
'%X{testString} should output the string stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testString}',
'testStringToken'
);
assert.end();
}
);
t.test(
'%X{testFunction} should output the result of the function stored in tokens',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{testFunction}',
'testFunctionToken'
);
assert.end();
}
);
t.test(
'%X{doesNotExist} should output the string stored in tokens',
(assert) => {
testPattern(assert, layout, event, {}, '%X{doesNotExist}', 'null');
assert.end();
}
);
t.test(
'%X{fnThatUsesLogEvent} should be able to use the logEvent',
(assert) => {
testPattern(
assert,
layout,
event,
{},
'%X{fnThatUsesLogEvent}',
'DEBUG'
);
assert.end();
}
);
t.test('%X should output the string stored in tokens', (assert) => {
testPattern(assert, layout, event, {}, '%X', 'null');
assert.end();
});
t.test('%M should output function name', (assert) => {
testPattern(assert, layout, event, tokens, '%M', functionName);
assert.end();
});
t.test(
'%M should output empty string when functionName not exist',
(assert) => {
delete event.functionName;
testPattern(assert, layout, event, tokens, '%M', '');
assert.end();
}
);
t.test('%C should output class name', (assert) => {
testPattern(assert, layout, event, tokens, '%C', className);
assert.end();
});
t.test(
'%C should output empty string when className not exist',
(assert) => {
delete event.className;
testPattern(assert, layout, event, tokens, '%C', '');
assert.end();
}
);
t.test('%A should output function alias', (assert) => {
testPattern(assert, layout, event, tokens, '%A', functionAlias);
assert.end();
});
t.test(
'%A should output empty string when functionAlias not exist',
(assert) => {
delete event.functionAlias;
testPattern(assert, layout, event, tokens, '%A', '');
assert.end();
}
);
t.test('%F should output fully qualified caller name', (assert) => {
testPattern(assert, layout, event, tokens, '%F', callerName);
assert.end();
});
t.test(
'%F should output empty string when callerName not exist',
(assert) => {
delete event.callerName;
testPattern(assert, layout, event, tokens, '%F', '');
assert.end();
}
);
t.end();
});
batch.test('layout makers', (t) => {
const layouts = require('../../lib/layouts');
t.test('should have a maker for each layout', (assert) => {
assert.ok(layouts.layout('messagePassThrough'));
assert.ok(layouts.layout('basic'));
assert.ok(layouts.layout('colored'));
assert.ok(layouts.layout('coloured'));
assert.ok(layouts.layout('pattern'));
assert.ok(layouts.layout('dummy'));
assert.end();
});
t.test(
'layout pattern maker should pass pattern and tokens to layout from config',
(assert) => {
let layout = layouts.layout('pattern', { pattern: '%%' });
assert.equal(layout({}), '%');
layout = layouts.layout('pattern', {
pattern: '%x{testStringToken}',
tokens: { testStringToken: 'cheese' },
});
assert.equal(layout({}), 'cheese');
assert.end();
}
);
t.end();
});
batch.test('add layout', (t) => {
const layouts = require('../../lib/layouts');
t.test('should be able to add a layout', (assert) => {
layouts.addLayout('test_layout', (config) => {
assert.equal(config, 'test_config');
return function(logEvent) {
return `TEST LAYOUT >${logEvent.data}`;
};
});
const serializer = layouts.layout('test_layout', 'test_config');
assert.ok(serializer);
assert.equal(serializer({ data: 'INPUT' }), 'TEST LAYOUT >INPUT');
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/rabbitmq-appender.js | // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false,
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m',
},
},
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' },
},
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| // Note that rabbitmq appender needs install amqplib to work.
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
},
file: {
type: 'dateFile',
filename: 'logs/log.txt',
pattern: 'yyyyMMdd',
alwaysIncludePattern: false,
},
mq: {
type: '@log4js-node/rabbitmq',
host: '127.0.0.1',
port: 5672,
username: 'guest',
password: 'guest',
routing_key: 'logstash',
exchange: 'exchange_logs',
mq_type: 'direct',
durable: true,
layout: {
type: 'pattern',
pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m',
},
},
},
categories: {
default: { appenders: ['out'], level: 'info' },
dateFile: { appenders: ['file'], level: 'info' },
rabbitmq: { appenders: ['mq'], level: 'info' },
},
});
const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');
function doTheLogging(x) {
log.info('Logging something %d', x);
logRabbitmq.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/configuration.js | const util = require('util');
const debug = require('debug')('log4js:configuration');
const preProcessingListeners = [];
const listeners = [];
const not = (thing) => !thing;
const anObject = (thing) =>
thing && typeof thing === 'object' && !Array.isArray(thing);
const validIdentifier = (thing) => /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing);
const anInteger = (thing) =>
thing && typeof thing === 'number' && Number.isInteger(thing);
const addListener = (fn) => {
listeners.push(fn);
debug(`Added listener, now ${listeners.length} listeners`);
};
const addPreProcessingListener = (fn) => {
preProcessingListeners.push(fn);
debug(
`Added pre-processing listener, now ${preProcessingListeners.length} listeners`
);
};
const throwExceptionIf = (config, checks, message) => {
const tests = Array.isArray(checks) ? checks : [checks];
tests.forEach((test) => {
if (test) {
throw new Error(
`Problem with log4js configuration: (${util.inspect(config, {
depth: 5,
})}) - ${message}`
);
}
});
};
const configure = (candidate) => {
debug('New configuration to be validated: ', candidate);
throwExceptionIf(candidate, not(anObject(candidate)), 'must be an object.');
debug(`Calling pre-processing listeners (${preProcessingListeners.length})`);
preProcessingListeners.forEach((listener) => listener(candidate));
debug('Configuration pre-processing finished.');
debug(`Calling configuration listeners (${listeners.length})`);
listeners.forEach((listener) => listener(candidate));
debug('Configuration finished.');
};
module.exports = {
configure,
addListener,
addPreProcessingListener,
throwExceptionIf,
anObject,
anInteger,
validIdentifier,
not,
};
| const util = require('util');
const debug = require('debug')('log4js:configuration');
const preProcessingListeners = [];
const listeners = [];
const not = (thing) => !thing;
const anObject = (thing) =>
thing && typeof thing === 'object' && !Array.isArray(thing);
const validIdentifier = (thing) => /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing);
const anInteger = (thing) =>
thing && typeof thing === 'number' && Number.isInteger(thing);
const addListener = (fn) => {
listeners.push(fn);
debug(`Added listener, now ${listeners.length} listeners`);
};
const addPreProcessingListener = (fn) => {
preProcessingListeners.push(fn);
debug(
`Added pre-processing listener, now ${preProcessingListeners.length} listeners`
);
};
const throwExceptionIf = (config, checks, message) => {
const tests = Array.isArray(checks) ? checks : [checks];
tests.forEach((test) => {
if (test) {
throw new Error(
`Problem with log4js configuration: (${util.inspect(config, {
depth: 5,
})}) - ${message}`
);
}
});
};
const configure = (candidate) => {
debug('New configuration to be validated: ', candidate);
throwExceptionIf(candidate, not(anObject(candidate)), 'must be an object.');
debug(`Calling pre-processing listeners (${preProcessingListeners.length})`);
preProcessingListeners.forEach((listener) => listener(candidate));
debug('Configuration pre-processing finished.');
debug(`Calling configuration listeners (${listeners.length})`);
listeners.forEach((listener) => listener(candidate));
debug('Configuration finished.');
};
module.exports = {
configure,
addListener,
addPreProcessingListener,
throwExceptionIf,
anObject,
anInteger,
validIdentifier,
not,
};
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/file.md | # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"file"`
- `filename` - `string` - the path of the file where you want your logs written.
- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
`maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`.
- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in all-the-logs.log");
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "file",
filename: "all-the-logs.log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "file", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| # File Appender
The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself.
## Configuration
- `type` - `"file"`
- `filename` - `string` - the path of the file where you want your logs written.
- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
`maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`.
- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file).
- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md)
Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams):
- `encoding` - `string` (default "utf-8")
- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes))
- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags))
- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension)
- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
const logger = log4js.getLogger();
logger.debug("I will be logged in all-the-logs.log");
```
This example will result in a single log file (`all-the-logs.log`) containing the log messages.
## Example with log rolling (and compressed backups)
```javascript
log4js.configure({
appenders: {
everything: {
type: "file",
filename: "all-the-logs.log",
maxLogSize: 10485760,
backups: 3,
compress: true,
},
},
categories: {
default: { appenders: ["everything"], level: "debug" },
},
});
```
This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on.
## Memory usage
If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`.
```javascript
log4js.configure({
appenders: {
output: { type: "file", filename: "out.log" },
},
categories: { default: { appenders: ["output"], level: "debug" } },
});
let paused = false;
process.on("log4js:pause", (value) => (paused = value));
const logger = log4js.getLogger();
while (!paused) {
logger.info("I'm logging, but I will stop once we start buffering");
}
```
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/smtp-appender.js | // Note that smtp appender needs nodemailer to work.
// If you haven't got nodemailer installed, you'll get cryptic
// "cannot find module" errors when using the smtp appender
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
},
mail: {
type: '@log4js-node/smtp',
recipients: 'logfilerecipient@logging.com',
sendInterval: 5,
transport: 'SMTP',
SMTP: {
host: 'smtp.gmail.com',
secureConnection: true,
port: 465,
auth: {
user: 'someone@gmail',
pass: '********************',
},
debug: true,
},
},
},
categories: {
default: { appenders: ['out'], level: 'info' },
mailer: { appenders: ['mail'], level: 'info' },
},
});
const log = log4js.getLogger('test');
const logmailer = log4js.getLogger('mailer');
function doTheLogging(x) {
log.info('Logging something %d', x);
logmailer.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| // Note that smtp appender needs nodemailer to work.
// If you haven't got nodemailer installed, you'll get cryptic
// "cannot find module" errors when using the smtp appender
const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
out: {
type: 'console',
},
mail: {
type: '@log4js-node/smtp',
recipients: 'logfilerecipient@logging.com',
sendInterval: 5,
transport: 'SMTP',
SMTP: {
host: 'smtp.gmail.com',
secureConnection: true,
port: 465,
auth: {
user: 'someone@gmail',
pass: '********************',
},
debug: true,
},
},
},
categories: {
default: { appenders: ['out'], level: 'info' },
mailer: { appenders: ['mail'], level: 'info' },
},
});
const log = log4js.getLogger('test');
const logmailer = log4js.getLogger('mailer');
function doTheLogging(x) {
log.info('Logging something %d', x);
logmailer.info('Logging something %d', x);
}
for (let i = 0; i < 500; i += 1) {
doTheLogging(i);
}
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/contrib-guidelines.md | # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| # Want to help?
I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged:
- Fork the repo, make a feature branch just for your changes
- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features.
- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. Plus it's always nice to know that your changes work :-)
- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then).
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/multiprocess-test.js | const childProcess = require('child_process');
const { test } = require('tap');
const flatted = require('flatted');
const sandbox = require('@log4js-node/sandboxed-module');
const recording = require('../../lib/appenders/recording');
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
},
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
},
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
},
};
},
};
}
test('Multiprocess Appender', async (batch) => {
batch.beforeEach((done) => {
recording.erase();
if (typeof done === 'function') {
done();
}
});
batch.test('worker', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: {
worker: {
type: 'multiprocess',
mode: 'worker',
loggerPort: 1234,
loggerHost: 'pants',
},
},
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.close();
logger.info('after error, before connect');
fakeNet.cbs.connect();
logger.info('after error, after connect');
logger.error(new Error('Error test'));
const net = fakeNet;
t.test(
'should open a socket to the loggerPort and loggerHost',
(assert) => {
assert.equal(net.port, 1234);
assert.equal(net.host, 'pants');
assert.end();
}
);
t.test(
'should buffer messages written before socket is connected',
(assert) => {
assert.match(net.data[0], 'before connect');
assert.end();
}
);
t.test(
'should write log messages to socket as flatted strings with a terminator string',
(assert) => {
assert.match(net.data[0], 'before connect');
assert.equal(net.data[1], '__LOG4JS__');
assert.match(net.data[2], 'after connect');
assert.equal(net.data[3], '__LOG4JS__');
assert.equal(net.encoding, 'utf8');
assert.end();
}
);
t.test('should attempt to re-open the socket on error', (assert) => {
assert.match(net.data[4], 'after error, before connect');
assert.equal(net.data[5], '__LOG4JS__');
assert.match(net.data[6], 'after error, after connect');
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test('should serialize an Error correctly', (assert) => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test('worker with timeout', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.timeout();
logger.info('after timeout, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after close, after connect');
const net = fakeNet;
t.test('should attempt to re-open the socket', (assert) => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], 'before connect');
assert.match(net.data[2], 'after connect');
assert.match(net.data[4], 'after timeout, before close');
assert.match(net.data[6], 'after close, before connect');
assert.match(net.data[8], 'after close, after connect');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test('worker with error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.error();
logger.info('after error, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after close, after connect');
const net = fakeNet;
t.test('should attempt to re-open the socket', (assert) => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], 'before connect');
assert.match(net.data[2], 'after connect');
assert.match(net.data[4], 'after error, before close');
assert.match(net.data[6], 'after close, before connect');
assert.match(net.data[8], 'after close, after connect');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test('worker defaults', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
t.test('should open a socket to localhost:5000', (assert) => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, 'localhost');
assert.end();
});
t.end();
});
batch.test('master', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recorder: { type: 'recording' },
master: {
type: 'multiprocess',
mode: 'master',
loggerPort: 1234,
loggerHost: 'server',
appender: 'recorder',
},
},
categories: { default: { appenders: ['master'], level: 'trace' } },
});
const net = fakeNet;
t.test(
'should listen for log messages on loggerPort and loggerHost',
(assert) => {
assert.equal(net.port, 1234);
assert.equal(net.host, 'server');
assert.end();
}
);
t.test('should return the underlying appender', (assert) => {
log4js
.getLogger()
.info('this should be sent to the actual appender directly');
assert.equal(
recording.replay()[0].data[0],
'this should be sent to the actual appender directly'
);
assert.end();
});
t.test('should log the error on "error" event', (assert) => {
net.cbs.error(new Error('Expected error'));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
'A worker log process hung up unexpectedly',
logEvents[0].data[0]
);
});
t.test('when a client connects', (assert) => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: 'DEBUG' },
data: ['some debug'],
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: 'ERROR' },
data: ['an error message'],
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: 'FATAL' },
data: ["that's all folks"],
})}__LOG4JS__`
);
net.cbs.data('bad message__LOG4JS__');
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), 'ERROR');
assert.equal(logEvents[0].data[0], 'an error message');
assert.equal(logEvents[0].remoteAddress, '1.2.3.4');
assert.equal(logEvents[0].remotePort, '1234');
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), 'DEBUG');
assert.equal(logEvents[1].data[0], 'some debug');
assert.equal(logEvents[1].remoteAddress, '1.2.3.4');
assert.equal(logEvents[1].remotePort, '1234');
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], 'some debug');
assert.equal(logEvents[3].data[0], 'some debug');
assert.equal(logEvents[4].data[0], 'some debug');
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), 'ERROR');
assert.equal(logEvents[6].categoryName, 'log4js');
assert.equal(logEvents[6].data[0], 'Unable to parse log:');
assert.equal(logEvents[6].data[1], 'bad message');
assert.end();
});
t.end();
});
batch.test('master without actual appender throws error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master' } },
categories: { default: { appenders: ['master'], level: 'trace' } },
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test('master with unknown appender throws error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
t.throws(
() =>
log4js.configure({
appenders: {
master: {
type: 'multiprocess',
mode: 'master',
appender: 'cheese',
},
},
categories: { default: { appenders: ['master'], level: 'trace' } },
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test('master defaults', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: {
stdout: { type: 'stdout' },
master: { type: 'multiprocess', mode: 'master', appender: 'stdout' },
},
categories: { default: { appenders: ['master'], level: 'trace' } },
});
t.test('should listen for log messages on localhost:5000', (assert) => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, 'localhost');
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: {
type: 'multiprocess',
mode: 'master',
appender: 'recording',
loggerPort: 5001,
},
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| const childProcess = require('child_process');
const { test } = require('tap');
const flatted = require('flatted');
const sandbox = require('@log4js-node/sandboxed-module');
const recording = require('../../lib/appenders/recording');
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end() {
fakeNet.closeCalled = true;
},
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
},
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
},
};
},
};
}
test('Multiprocess Appender', async (batch) => {
batch.beforeEach((done) => {
recording.erase();
if (typeof done === 'function') {
done();
}
});
batch.test('worker', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: {
worker: {
type: 'multiprocess',
mode: 'worker',
loggerPort: 1234,
loggerHost: 'pants',
},
},
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.close();
logger.info('after error, before connect');
fakeNet.cbs.connect();
logger.info('after error, after connect');
logger.error(new Error('Error test'));
const net = fakeNet;
t.test(
'should open a socket to the loggerPort and loggerHost',
(assert) => {
assert.equal(net.port, 1234);
assert.equal(net.host, 'pants');
assert.end();
}
);
t.test(
'should buffer messages written before socket is connected',
(assert) => {
assert.match(net.data[0], 'before connect');
assert.end();
}
);
t.test(
'should write log messages to socket as flatted strings with a terminator string',
(assert) => {
assert.match(net.data[0], 'before connect');
assert.equal(net.data[1], '__LOG4JS__');
assert.match(net.data[2], 'after connect');
assert.equal(net.data[3], '__LOG4JS__');
assert.equal(net.encoding, 'utf8');
assert.end();
}
);
t.test('should attempt to re-open the socket on error', (assert) => {
assert.match(net.data[4], 'after error, before connect');
assert.equal(net.data[5], '__LOG4JS__');
assert.match(net.data[6], 'after error, after connect');
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.test('should serialize an Error correctly', (assert) => {
assert.ok(
flatted.parse(net.data[8]).data[0].stack,
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(net.data[8]).data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.test('worker with timeout', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.timeout();
logger.info('after timeout, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after close, after connect');
const net = fakeNet;
t.test('should attempt to re-open the socket', (assert) => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], 'before connect');
assert.match(net.data[2], 'after connect');
assert.match(net.data[4], 'after timeout, before close');
assert.match(net.data[6], 'after close, before connect');
assert.match(net.data[8], 'after close, after connect');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test('worker with error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
const logger = log4js.getLogger();
logger.info('before connect');
fakeNet.cbs.connect();
logger.info('after connect');
fakeNet.cbs.error();
logger.info('after error, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after close, after connect');
const net = fakeNet;
t.test('should attempt to re-open the socket', (assert) => {
// skipping the __LOG4JS__ separators
assert.match(net.data[0], 'before connect');
assert.match(net.data[2], 'after connect');
assert.match(net.data[4], 'after error, before close');
assert.match(net.data[6], 'after close, before connect');
assert.match(net.data[8], 'after close, after connect');
assert.equal(net.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test('worker defaults', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
categories: { default: { appenders: ['worker'], level: 'trace' } },
});
t.test('should open a socket to localhost:5000', (assert) => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, 'localhost');
assert.end();
});
t.end();
});
batch.test('master', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recorder: { type: 'recording' },
master: {
type: 'multiprocess',
mode: 'master',
loggerPort: 1234,
loggerHost: 'server',
appender: 'recorder',
},
},
categories: { default: { appenders: ['master'], level: 'trace' } },
});
const net = fakeNet;
t.test(
'should listen for log messages on loggerPort and loggerHost',
(assert) => {
assert.equal(net.port, 1234);
assert.equal(net.host, 'server');
assert.end();
}
);
t.test('should return the underlying appender', (assert) => {
log4js
.getLogger()
.info('this should be sent to the actual appender directly');
assert.equal(
recording.replay()[0].data[0],
'this should be sent to the actual appender directly'
);
assert.end();
});
t.test('should log the error on "error" event', (assert) => {
net.cbs.error(new Error('Expected error'));
const logEvents = recording.replay();
assert.plan(2);
assert.equal(logEvents.length, 1);
assert.equal(
'A worker log process hung up unexpectedly',
logEvents[0].data[0]
);
});
t.test('when a client connects', (assert) => {
const logString = `${flatted.stringify({
level: { level: 10000, levelStr: 'DEBUG' },
data: ['some debug'],
})}__LOG4JS__`;
net.cbs.data(
`${flatted.stringify({
level: { level: 40000, levelStr: 'ERROR' },
data: ['an error message'],
})}__LOG4JS__`
);
net.cbs.data(logString.slice(0, 10));
net.cbs.data(logString.slice(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${flatted.stringify({
level: { level: 50000, levelStr: 'FATAL' },
data: ["that's all folks"],
})}__LOG4JS__`
);
net.cbs.data('bad message__LOG4JS__');
const logEvents = recording.replay();
// should parse log messages into log events and send to appender
assert.equal(logEvents[0].level.toString(), 'ERROR');
assert.equal(logEvents[0].data[0], 'an error message');
assert.equal(logEvents[0].remoteAddress, '1.2.3.4');
assert.equal(logEvents[0].remotePort, '1234');
// should parse log messages split into multiple chunks'
assert.equal(logEvents[1].level.toString(), 'DEBUG');
assert.equal(logEvents[1].data[0], 'some debug');
assert.equal(logEvents[1].remoteAddress, '1.2.3.4');
assert.equal(logEvents[1].remotePort, '1234');
// should parse multiple log messages in a single chunk'
assert.equal(logEvents[2].data[0], 'some debug');
assert.equal(logEvents[3].data[0], 'some debug');
assert.equal(logEvents[4].data[0], 'some debug');
// should handle log messages sent as part of end event'
assert.equal(logEvents[5].data[0], "that's all folks");
// should handle unparseable log messages
assert.equal(logEvents[6].level.toString(), 'ERROR');
assert.equal(logEvents[6].categoryName, 'log4js');
assert.equal(logEvents[6].data[0], 'Unable to parse log:');
assert.equal(logEvents[6].data[1], 'bad message');
assert.end();
});
t.end();
});
batch.test('master without actual appender throws error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
t.throws(
() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master' } },
categories: { default: { appenders: ['master'], level: 'trace' } },
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
});
batch.test('master with unknown appender throws error', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
t.throws(
() =>
log4js.configure({
appenders: {
master: {
type: 'multiprocess',
mode: 'master',
appender: 'cheese',
},
},
categories: { default: { appenders: ['master'], level: 'trace' } },
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
});
batch.test('master defaults', (t) => {
const fakeNet = makeFakeNet();
const log4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
log4js.configure({
appenders: {
stdout: { type: 'stdout' },
master: { type: 'multiprocess', mode: 'master', appender: 'stdout' },
},
categories: { default: { appenders: ['master'], level: 'trace' } },
});
t.test('should listen for log messages on localhost:5000', (assert) => {
assert.equal(fakeNet.port, 5000);
assert.equal(fakeNet.host, 'localhost');
assert.end();
});
t.end();
});
await batch.test('e2e test', async (assert) => {
const log4js = sandbox.require('../../lib/log4js', {
requires: {
'./appenders/recording': recording,
},
});
log4js.configure({
appenders: {
recording: { type: 'recording' },
master: {
type: 'multiprocess',
mode: 'master',
appender: 'recording',
loggerPort: 5001,
},
},
categories: { default: { appenders: ['recording'], level: 'trace' } },
});
const child = childProcess.fork(
require.resolve('../multiprocess-worker.js'),
['start-multiprocess-worker', '5001'],
{ stdio: 'inherit' }
);
const actualMsg = await new Promise((res, rej) => {
child.on('message', res);
child.on('error', rej);
});
const logEvents = recording.replay();
assert.equal(actualMsg, 'worker is done');
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'Logging from worker');
assert.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/index.md | # log4js-node
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
[Changes in version 3.x](v3-changes.md)
## Migrating from log4js < v2.x?
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](migration-guide.md) if things aren't working.
## Features
- coloured console logging to [stdout](stdout.md) or [stderr](stderr.md)
- [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md)
- [SMTP appender](https://github.com/log4js-node/smtp)
- [GELF appender](https://github.com/log4js-node/gelf)
- [Loggly appender](https://github.com/log4js-node/loggly)
- [Logstash UDP appender](https://github.com/log4js-node/logstashUDP)
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender
- [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging)
- a [logger for connect/express](connect-logger.md) servers
- configurable log message [layout/patterns](layouts.md)
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
- built-in support for logging with node core's `cluster` module
- third-party [InfluxDB appender](https://github.com/rnd-debug/log4js-influxdb-appender)
## Installation
```bash
npm install log4js
```
## Usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug"; // default level is OFF - which means no logs at all.
logger.debug("Some debug messages");
```
## Clustering
If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md)
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| # log4js-node
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion.
[Changes in version 3.x](v3-changes.md)
## Migrating from log4js < v2.x?
There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](migration-guide.md) if things aren't working.
## Features
- coloured console logging to [stdout](stdout.md) or [stderr](stderr.md)
- [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md)
- [SMTP appender](https://github.com/log4js-node/smtp)
- [GELF appender](https://github.com/log4js-node/gelf)
- [Loggly appender](https://github.com/log4js-node/loggly)
- [Logstash UDP appender](https://github.com/log4js-node/logstashUDP)
- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender
- [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging)
- a [logger for connect/express](connect-logger.md) servers
- configurable log message [layout/patterns](layouts.md)
- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
- built-in support for logging with node core's `cluster` module
- third-party [InfluxDB appender](https://github.com/rnd-debug/log4js-influxdb-appender)
## Installation
```bash
npm install log4js
```
## Usage
Minimalist version:
```javascript
var log4js = require("log4js");
var logger = log4js.getLogger();
logger.level = "debug"; // default level is OFF - which means no logs at all.
logger.debug("Some debug messages");
```
## Clustering
If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md)
## Note for library makers
If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api).
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./commitlint.config.js | module.exports = { extends: ['@commitlint/config-conventional'] };
| module.exports = { extends: ['@commitlint/config-conventional'] };
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/example.js | 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| 'use strict';
const log4js = require('../lib/log4js');
// log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: {
cheeseLogs: { type: 'file', filename: 'cheese.log' },
console: { type: 'console' },
},
categories: {
cheese: { appenders: ['cheeseLogs'], level: 'error' },
another: { appenders: ['console'], level: 'trace' },
default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
},
});
// a custom logger outside of the log4js/lib/appenders directory can be accessed like so
// log4js.configure({
// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } }
// ...
// });
const logger = log4js.getLogger('cheese');
// only errors and above get logged.
const otherLogger = log4js.getLogger();
// this will get coloured output on console, and appear in cheese.log
otherLogger.error('AAArgh! Something went wrong', {
some: 'otherObject',
useful_for: 'debug purposes',
});
otherLogger.log('This should appear as info output');
// these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
// these end up only in cheese.log
logger.error('Cheese %s is too ripe!', 'gouda');
logger.fatal('Cheese was breeding ground for listeria.');
// these don't end up in cheese.log, but will appear on the console
const anotherLogger = log4js.getLogger('another');
anotherLogger.debug('Just checking');
// will also go to console and cheese.log, since that's configured for all categories
const pantsLog = log4js.getLogger('pants');
pantsLog.debug('Something for pants');
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/appenders/noLogFilter.js | const debug = require('debug')('log4js:noLogFilter');
/**
* The function removes empty or null regexp from the array
* @param {string[]} regexp
* @returns {string[]} a filtered string array with not empty or null regexp
*/
function removeNullOrEmptyRegexp(regexp) {
const filtered = regexp.filter((el) => el != null && el !== '');
return filtered;
}
/**
* Returns a function that will exclude the events in case they match
* with the regular expressions provided
* @param {(string|string[])} filters contains the regexp that will be used for the evaluation
* @param {*} appender
* @returns {function}
*/
function noLogFilter(filters, appender) {
return (logEvent) => {
debug(`Checking data: ${logEvent.data} against filters: ${filters}`);
if (typeof filters === 'string') {
filters = [filters];
}
filters = removeNullOrEmptyRegexp(filters);
const regex = new RegExp(filters.join('|'), 'i');
if (
filters.length === 0 ||
logEvent.data.findIndex((value) => regex.test(value)) < 0
) {
debug('Not excluded, sending to appender');
appender(logEvent);
}
};
}
function configure(config, layouts, findAppender) {
const appender = findAppender(config.appender);
return noLogFilter(config.exclude, appender);
}
module.exports.configure = configure;
| const debug = require('debug')('log4js:noLogFilter');
/**
* The function removes empty or null regexp from the array
* @param {string[]} regexp
* @returns {string[]} a filtered string array with not empty or null regexp
*/
function removeNullOrEmptyRegexp(regexp) {
const filtered = regexp.filter((el) => el != null && el !== '');
return filtered;
}
/**
* Returns a function that will exclude the events in case they match
* with the regular expressions provided
* @param {(string|string[])} filters contains the regexp that will be used for the evaluation
* @param {*} appender
* @returns {function}
*/
function noLogFilter(filters, appender) {
return (logEvent) => {
debug(`Checking data: ${logEvent.data} against filters: ${filters}`);
if (typeof filters === 'string') {
filters = [filters];
}
filters = removeNullOrEmptyRegexp(filters);
const regex = new RegExp(filters.join('|'), 'i');
if (
filters.length === 0 ||
logEvent.data.findIndex((value) => regex.test(value)) < 0
) {
debug('Not excluded, sending to appender');
appender(logEvent);
}
};
}
function configure(config, layouts, findAppender) {
const appender = findAppender(config.appender);
return noLogFilter(config.exclude, appender);
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/tcp-appender-test.js | const { test } = require('tap');
const net = require('net');
const flatted = require('flatted');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
const LoggingEvent = require('../../lib/LoggingEvent');
let messages = [];
let server = null;
function makeServer(config) {
server = net.createServer((socket) => {
socket.setEncoding('utf8');
socket.on('data', (data) => {
data
.split(config.endMsg)
.filter((s) => s.length)
.forEach((s) => {
messages.push(config.deserialise(s));
});
});
});
server.unref();
return server;
}
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
return false;
},
end() {
fakeNet.closeCalled = true;
},
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
},
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
},
};
},
};
}
test('TCP Appender', (batch) => {
batch.test('Default Configuration', (t) => {
messages = [];
const serverConfig = {
endMsg: '__LOG4JS__',
deserialise: (log) => LoggingEvent.deserialise(log),
};
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
default: { type: 'tcp', port },
},
categories: {
default: { appenders: ['default'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent via TCP.');
logger.info('This should also be sent via TCP and not break things.');
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ['This should be sent via TCP.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.match(messages[1], {
data: ['This should also be sent via TCP and not break things.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.end();
});
});
});
});
batch.test('Custom EndMessage String', (t) => {
messages = [];
const serverConfig = {
endMsg: '\n',
deserialise: (log) => LoggingEvent.deserialise(log),
};
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customEndMsg: { type: 'tcp', port, endMsg: '\n' },
},
categories: {
default: { appenders: ['customEndMsg'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent via TCP using a custom EndMsg string.');
logger.info(
'This should also be sent via TCP using a custom EndMsg string and not break things.'
);
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ['This should be sent via TCP using a custom EndMsg string.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.match(messages[1], {
data: [
'This should also be sent via TCP using a custom EndMsg string and not break things.',
],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.end();
});
});
});
});
batch.test('Custom Layout', (t) => {
messages = [];
const serverConfig = {
endMsg: '__LOG4JS__',
deserialise: (log) => JSON.parse(log),
};
server = makeServer(serverConfig);
log4js.addLayout(
'json',
() =>
function(logEvent) {
return JSON.stringify({
time: logEvent.startTime,
message: logEvent.data[0],
level: logEvent.level.toString(),
});
}
);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customLayout: {
type: 'tcp',
port,
layout: { type: 'json' },
},
},
categories: {
default: { appenders: ['customLayout'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent as a customized json.');
logger.info(
'This should also be sent via TCP as a customized json and not break things.'
);
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
message: 'This should be sent as a customized json.',
level: 'INFO',
});
t.match(messages[1], {
message:
'This should also be sent via TCP as a customized json and not break things.',
level: 'INFO',
});
t.end();
});
});
});
});
batch.test('when underlying stream errors', (t) => {
const fakeNet = makeFakeNet();
const sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
sandboxedLog4js.configure({
appenders: {
default: { type: 'tcp' },
},
categories: {
default: { appenders: ['default'], level: 'debug' },
},
});
const logger = sandboxedLog4js.getLogger();
logger.info('before connect');
t.test(
'should buffer messages written before socket is connected',
(assert) => {
assert.equal(fakeNet.data.length, 0);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.end();
}
);
fakeNet.cbs.connect();
t.test('should flush buffered messages', (assert) => {
assert.equal(fakeNet.data.length, 1);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.match(fakeNet.data[0], 'before connect');
assert.end();
});
logger.info('after connect');
t.test(
'should write log messages to socket as flatted strings with a terminator string',
(assert) => {
assert.equal(fakeNet.data.length, 2);
assert.match(fakeNet.data[0], 'before connect');
assert.ok(fakeNet.data[0].endsWith('__LOG4JS__'));
assert.match(fakeNet.data[1], 'after connect');
assert.ok(fakeNet.data[1].endsWith('__LOG4JS__'));
assert.equal(fakeNet.encoding, 'utf8');
assert.end();
}
);
fakeNet.cbs.error();
logger.info('after error, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after error, after connect');
t.test('should attempt to re-open the socket on error', (assert) => {
assert.equal(fakeNet.data.length, 5);
assert.equal(fakeNet.createConnectionCalled, 2);
assert.match(fakeNet.data[2], 'after error, before close');
assert.match(fakeNet.data[3], 'after close, before connect');
assert.match(fakeNet.data[4], 'after error, after connect');
assert.end();
});
t.test('should buffer messages until drain', (assert) => {
const previousLength = fakeNet.data.length;
logger.info('should not be flushed');
assert.equal(fakeNet.data.length, previousLength);
assert.notMatch(
fakeNet.data[fakeNet.data.length - 1],
'should not be flushed'
);
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
assert.match(
fakeNet.data[fakeNet.data.length - 1],
'should not be flushed'
);
assert.end();
});
t.test('should serialize an Error correctly', (assert) => {
const previousLength = fakeNet.data.length;
logger.error(new Error('Error test'));
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
const raw = fakeNet.data[fakeNet.data.length - 1];
const offset = raw.indexOf('__LOG4JS__');
assert.ok(
flatted.parse(raw.slice(0, offset !== -1 ? offset : 0)).data[0].stack,
`Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(raw.slice(0, offset !== -1 ? offset : 0))
.data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.end();
});
| const { test } = require('tap');
const net = require('net');
const flatted = require('flatted');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
const LoggingEvent = require('../../lib/LoggingEvent');
let messages = [];
let server = null;
function makeServer(config) {
server = net.createServer((socket) => {
socket.setEncoding('utf8');
socket.on('data', (data) => {
data
.split(config.endMsg)
.filter((s) => s.length)
.forEach((s) => {
messages.push(config.deserialise(s));
});
});
});
server.unref();
return server;
}
function makeFakeNet() {
return {
data: [],
cbs: {},
createConnectionCalled: 0,
createConnection(port, host) {
const fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
return false;
},
end() {
fakeNet.closeCalled = true;
},
};
},
createServer(cb) {
const fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding(encoding) {
fakeNet.encoding = encoding;
},
on(event, cb2) {
fakeNet.cbs[event] = cb2;
},
});
return {
listen(port, host) {
fakeNet.port = port;
fakeNet.host = host;
},
};
},
};
}
test('TCP Appender', (batch) => {
batch.test('Default Configuration', (t) => {
messages = [];
const serverConfig = {
endMsg: '__LOG4JS__',
deserialise: (log) => LoggingEvent.deserialise(log),
};
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
default: { type: 'tcp', port },
},
categories: {
default: { appenders: ['default'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent via TCP.');
logger.info('This should also be sent via TCP and not break things.');
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ['This should be sent via TCP.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.match(messages[1], {
data: ['This should also be sent via TCP and not break things.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.end();
});
});
});
});
batch.test('Custom EndMessage String', (t) => {
messages = [];
const serverConfig = {
endMsg: '\n',
deserialise: (log) => LoggingEvent.deserialise(log),
};
server = makeServer(serverConfig);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customEndMsg: { type: 'tcp', port, endMsg: '\n' },
},
categories: {
default: { appenders: ['customEndMsg'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent via TCP using a custom EndMsg string.');
logger.info(
'This should also be sent via TCP using a custom EndMsg string and not break things.'
);
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ['This should be sent via TCP using a custom EndMsg string.'],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.match(messages[1], {
data: [
'This should also be sent via TCP using a custom EndMsg string and not break things.',
],
categoryName: 'default',
context: {},
level: { levelStr: 'INFO' },
});
t.end();
});
});
});
});
batch.test('Custom Layout', (t) => {
messages = [];
const serverConfig = {
endMsg: '__LOG4JS__',
deserialise: (log) => JSON.parse(log),
};
server = makeServer(serverConfig);
log4js.addLayout(
'json',
() =>
function(logEvent) {
return JSON.stringify({
time: logEvent.startTime,
message: logEvent.data[0],
level: logEvent.level.toString(),
});
}
);
server.listen(() => {
const { port } = server.address();
log4js.configure({
appenders: {
customLayout: {
type: 'tcp',
port,
layout: { type: 'json' },
},
},
categories: {
default: { appenders: ['customLayout'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('This should be sent as a customized json.');
logger.info(
'This should also be sent via TCP as a customized json and not break things.'
);
log4js.shutdown(() => {
server.close(() => {
t.equal(messages.length, 2);
t.match(messages[0], {
message: 'This should be sent as a customized json.',
level: 'INFO',
});
t.match(messages[1], {
message:
'This should also be sent via TCP as a customized json and not break things.',
level: 'INFO',
});
t.end();
});
});
});
});
batch.test('when underlying stream errors', (t) => {
const fakeNet = makeFakeNet();
const sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
net: fakeNet,
},
});
sandboxedLog4js.configure({
appenders: {
default: { type: 'tcp' },
},
categories: {
default: { appenders: ['default'], level: 'debug' },
},
});
const logger = sandboxedLog4js.getLogger();
logger.info('before connect');
t.test(
'should buffer messages written before socket is connected',
(assert) => {
assert.equal(fakeNet.data.length, 0);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.end();
}
);
fakeNet.cbs.connect();
t.test('should flush buffered messages', (assert) => {
assert.equal(fakeNet.data.length, 1);
assert.equal(fakeNet.createConnectionCalled, 1);
assert.match(fakeNet.data[0], 'before connect');
assert.end();
});
logger.info('after connect');
t.test(
'should write log messages to socket as flatted strings with a terminator string',
(assert) => {
assert.equal(fakeNet.data.length, 2);
assert.match(fakeNet.data[0], 'before connect');
assert.ok(fakeNet.data[0].endsWith('__LOG4JS__'));
assert.match(fakeNet.data[1], 'after connect');
assert.ok(fakeNet.data[1].endsWith('__LOG4JS__'));
assert.equal(fakeNet.encoding, 'utf8');
assert.end();
}
);
fakeNet.cbs.error();
logger.info('after error, before close');
fakeNet.cbs.close();
logger.info('after close, before connect');
fakeNet.cbs.connect();
logger.info('after error, after connect');
t.test('should attempt to re-open the socket on error', (assert) => {
assert.equal(fakeNet.data.length, 5);
assert.equal(fakeNet.createConnectionCalled, 2);
assert.match(fakeNet.data[2], 'after error, before close');
assert.match(fakeNet.data[3], 'after close, before connect');
assert.match(fakeNet.data[4], 'after error, after connect');
assert.end();
});
t.test('should buffer messages until drain', (assert) => {
const previousLength = fakeNet.data.length;
logger.info('should not be flushed');
assert.equal(fakeNet.data.length, previousLength);
assert.notMatch(
fakeNet.data[fakeNet.data.length - 1],
'should not be flushed'
);
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
assert.match(
fakeNet.data[fakeNet.data.length - 1],
'should not be flushed'
);
assert.end();
});
t.test('should serialize an Error correctly', (assert) => {
const previousLength = fakeNet.data.length;
logger.error(new Error('Error test'));
fakeNet.cbs.drain();
assert.equal(fakeNet.data.length, previousLength + 1);
const raw = fakeNet.data[fakeNet.data.length - 1];
const offset = raw.indexOf('__LOG4JS__');
assert.ok(
flatted.parse(raw.slice(0, offset !== -1 ? offset : 0)).data[0].stack,
`Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
);
const actual = flatted.parse(raw.slice(0, offset !== -1 ? offset : 0))
.data[0].stack;
assert.match(actual, /^Error: Error test/);
assert.end();
});
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/levels-before-configure-test.js | const { test } = require('tap');
test('Accessing things setup in configure before configure is called', (batch) => {
batch.test('should work', (t) => {
const log4js = require('../../lib/log4js');
t.ok(log4js.levels);
t.ok(log4js.connectLogger);
t.end();
});
batch.end();
});
| const { test } = require('tap');
test('Accessing things setup in configure before configure is called', (batch) => {
batch.test('should work', (t) => {
const log4js = require('../../lib/log4js');
t.ok(log4js.levels);
t.ok(log4js.connectLogger);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/appender-dependencies-test.js | const { test } = require('tap');
const categories = {
default: { appenders: ['filtered'], level: 'debug' },
};
let log4js;
let recording;
test('log4js appender dependencies', (batch) => {
batch.beforeEach((done) => {
log4js = require('../../lib/log4js');
recording = require('../../lib/appenders/recording');
if (typeof done === 'function') {
done();
}
});
batch.afterEach((done) => {
recording.erase();
if (typeof done === 'function') {
done();
}
});
batch.test('in order', (t) => {
const config = {
categories,
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'logLevelFilter',
appender: 'recorder',
level: 'ERROR',
},
},
};
t.test('should resolve if defined in dependency order', (assert) => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger('logLevelTest');
logger.debug('this should not trigger an event');
logger.error('this should, though');
const logEvents = recording.replay();
t.test('should process log events normally', (assert) => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.end();
});
t.end();
});
batch.test('not in order', (t) => {
const config = {
categories,
appenders: {
filtered: {
type: 'logLevelFilter',
appender: 'recorder',
level: 'ERROR',
},
recorder: { type: 'recording' },
},
};
t.test('should resolve if defined out of dependency order', (assert) => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger('logLevelTest');
logger.debug('this should not trigger an event');
logger.error('this should, though');
const logEvents = recording.replay();
t.test('should process log events normally', (assert) => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.end();
});
t.end();
});
batch.test('with dependency loop', (t) => {
const config = {
categories,
appenders: {
filtered: {
type: 'logLevelFilter',
appender: 'filtered2',
level: 'ERROR',
},
filtered2: {
type: 'logLevelFilter',
appender: 'filtered',
level: 'ERROR',
},
recorder: { type: 'recording' },
},
};
t.test(
'should throw an error if if a dependency loop is found',
(assert) => {
assert.throws(() => {
log4js.configure(config);
}, 'Dependency loop detected for appender filtered.');
assert.end();
}
);
t.end();
});
batch.end();
});
| const { test } = require('tap');
const categories = {
default: { appenders: ['filtered'], level: 'debug' },
};
let log4js;
let recording;
test('log4js appender dependencies', (batch) => {
batch.beforeEach((done) => {
log4js = require('../../lib/log4js');
recording = require('../../lib/appenders/recording');
if (typeof done === 'function') {
done();
}
});
batch.afterEach((done) => {
recording.erase();
if (typeof done === 'function') {
done();
}
});
batch.test('in order', (t) => {
const config = {
categories,
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'logLevelFilter',
appender: 'recorder',
level: 'ERROR',
},
},
};
t.test('should resolve if defined in dependency order', (assert) => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger('logLevelTest');
logger.debug('this should not trigger an event');
logger.error('this should, though');
const logEvents = recording.replay();
t.test('should process log events normally', (assert) => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.end();
});
t.end();
});
batch.test('not in order', (t) => {
const config = {
categories,
appenders: {
filtered: {
type: 'logLevelFilter',
appender: 'recorder',
level: 'ERROR',
},
recorder: { type: 'recording' },
},
};
t.test('should resolve if defined out of dependency order', (assert) => {
assert.doesNotThrow(() => {
log4js.configure(config);
}, 'this should not trigger an error');
assert.end();
});
const logger = log4js.getLogger('logLevelTest');
logger.debug('this should not trigger an event');
logger.error('this should, though');
const logEvents = recording.replay();
t.test('should process log events normally', (assert) => {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.end();
});
t.end();
});
batch.test('with dependency loop', (t) => {
const config = {
categories,
appenders: {
filtered: {
type: 'logLevelFilter',
appender: 'filtered2',
level: 'ERROR',
},
filtered2: {
type: 'logLevelFilter',
appender: 'filtered',
level: 'ERROR',
},
recorder: { type: 'recording' },
},
};
t.test(
'should throw an error if if a dependency loop is found',
(assert) => {
assert.throws(() => {
log4js.configure(config);
}, 'Dependency loop detected for appender filtered.');
assert.end();
}
);
t.end();
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/file-sighup-test.js | const { test } = require('tap');
const path = require('path');
const fs = require('fs');
const sandbox = require('@log4js-node/sandboxed-module');
// Grace period (ms) for asynchronous appender side effects to settle;
// Windows file I/O tends to be slower, so it gets the longer delay.
const osDelay = process.platform === 'win32' ? 400 : 200;
// Best-effort cleanup helper: deletes the given file (or list of files),
// ignoring failures such as files that were never created. Because
// Promise.allSettled never rejects, the returned promise never rejects.
const removeFiles = async (filenames) => {
  const targets = Array.isArray(filenames) ? filenames : [filenames];
  await Promise.allSettled(
    targets.map((filename) => fs.promises.unlink(filename))
  );
};
// Regression test: log4js should install ONE shared SIGHUP handler no matter
// how many file appenders are configured, so configuring more than 10
// appenders must not raise Node's MaxListenersExceededWarning.
test('file appender single SIGHUP handler', (t) => {
  const initialListeners = process.listenerCount('SIGHUP');
  let warning;
  // swap out the current 'warning' listener so a MaxListenersExceededWarning
  // for SIGHUP can be captured instead of printed; any other warning is
  // forwarded to the original listener untouched
  const originalListener = process.listeners('warning')[
    process.listeners('warning').length - 1
  ];
  const warningListener = (error) => {
    if (
      error.type === 'SIGHUP' &&
      error.name === 'MaxListenersExceededWarning'
    ) {
      warning = error;
      return;
    }
    originalListener(error);
  };
  process.off('warning', originalListener);
  process.on('warning', warningListener);
  const config = {
    appenders: {},
    categories: {
      default: { appenders: [], level: 'debug' },
    },
  };
  // create 11 appenders to make nodejs warn for >10 max listeners
  const numOfAppenders = 11;
  for (let i = 1; i <= numOfAppenders; i++) {
    config.appenders[`app${i}`] = {
      type: 'file',
      filename: path.join(__dirname, `file${i}.log`),
    };
    config.categories.default.appenders.push(`app${i}`);
  }
  const log4js = require('../../lib/log4js');
  log4js.configure(config);
  t.teardown(async () => {
    // next event loop so that past warnings will not be printed
    setImmediate(() => {
      process.off('warning', warningListener);
      process.on('warning', originalListener);
    });
    await new Promise((resolve) => {
      log4js.shutdown(resolve);
    });
    // delete every log file the 11 appenders created
    const filenames = Object.values(config.appenders).map(
      (appender) => appender.filename
    );
    await removeFiles(filenames);
  });
  t.plan(2);
  // next event loop to allow event emitter/listener to happen
  setImmediate(() => {
    t.notOk(warning, 'should not have MaxListenersExceededWarning for SIGHUP');
    t.equal(
      process.listenerCount('SIGHUP') - initialListeners,
      1,
      'should be 1 SIGHUP listener'
    );
    t.end();
  });
});
// On SIGHUP the file appender must close its current stream (close called
// once) and open a fresh one (open called twice in total). streamroller is
// stubbed via sandboxed-module so open/close activity can be counted.
test('file appender SIGHUP', (t) => {
  let closeCalled = 0;
  let openCalled = 0;
  sandbox
    .require('../../lib/appenders/file', {
      requires: {
        streamroller: {
          // minimal stand-in for the real RollingFileStream that only
          // records how often it is opened/closed
          RollingFileStream: class RollingFileStream {
            constructor() {
              openCalled++;
              this.ended = false;
            }
            on() {
              this.dummy = 'easier than turning off lint rule';
            }
            end(cb) {
              this.ended = true;
              closeCalled++;
              cb();
            }
            write() {
              if (this.ended) {
                throw new Error('write after end');
              }
              return true;
            }
          },
        },
      },
    })
    .configure(
      { type: 'file', filename: 'sighup-test-file' },
      {
        basicLayout() {
          return 'whatever';
        },
      }
    );
  process.emit('SIGHUP', 'SIGHUP', 1);
  t.plan(2);
  // give the appender's asynchronous SIGHUP handling time to run
  setTimeout(() => {
    t.equal(openCalled, 2, 'open should be called twice');
    t.equal(closeCalled, 1, 'close should be called once');
    t.end();
  }, osDelay);
});
// The SIGHUP listener added by configure() must be removed again on
// shutdown, i.e. repeated configure/shutdown cycles do not leak
// process-level signal handlers.
test('file appender SIGHUP handler leak', (t) => {
  const log4js = require('../../lib/log4js');
  const initialListeners = process.listenerCount('SIGHUP');
  log4js.configure({
    appenders: {
      file: { type: 'file', filename: 'test.log' },
    },
    categories: { default: { appenders: ['file'], level: 'info' } },
  });
  t.teardown(async () => {
    await removeFiles('test.log');
  });
  t.plan(2);
  t.equal(process.listenerCount('SIGHUP'), initialListeners + 1);
  log4js.shutdown(() => {
    // shutdown must deregister the handler it added
    t.equal(process.listenerCount('SIGHUP'), initialListeners);
    t.end();
  });
});
| const { test } = require('tap');
const path = require('path');
const fs = require('fs');
const sandbox = require('@log4js-node/sandboxed-module');
// Grace period (ms) for asynchronous appender side effects to settle;
// Windows file I/O tends to be slower, so it gets the longer delay.
const osDelay = process.platform === 'win32' ? 400 : 200;
// Best-effort cleanup helper: deletes the given file (or list of files),
// ignoring failures such as files that were never created. Because
// Promise.allSettled never rejects, the returned promise never rejects.
const removeFiles = async (filenames) => {
  const targets = Array.isArray(filenames) ? filenames : [filenames];
  await Promise.allSettled(
    targets.map((filename) => fs.promises.unlink(filename))
  );
};
// Regression test: log4js should install ONE shared SIGHUP handler no matter
// how many file appenders are configured, so configuring more than 10
// appenders must not raise Node's MaxListenersExceededWarning.
test('file appender single SIGHUP handler', (t) => {
  const initialListeners = process.listenerCount('SIGHUP');
  let warning;
  // swap out the current 'warning' listener so a MaxListenersExceededWarning
  // for SIGHUP can be captured instead of printed; any other warning is
  // forwarded to the original listener untouched
  const originalListener = process.listeners('warning')[
    process.listeners('warning').length - 1
  ];
  const warningListener = (error) => {
    if (
      error.type === 'SIGHUP' &&
      error.name === 'MaxListenersExceededWarning'
    ) {
      warning = error;
      return;
    }
    originalListener(error);
  };
  process.off('warning', originalListener);
  process.on('warning', warningListener);
  const config = {
    appenders: {},
    categories: {
      default: { appenders: [], level: 'debug' },
    },
  };
  // create 11 appenders to make nodejs warn for >10 max listeners
  const numOfAppenders = 11;
  for (let i = 1; i <= numOfAppenders; i++) {
    config.appenders[`app${i}`] = {
      type: 'file',
      filename: path.join(__dirname, `file${i}.log`),
    };
    config.categories.default.appenders.push(`app${i}`);
  }
  const log4js = require('../../lib/log4js');
  log4js.configure(config);
  t.teardown(async () => {
    // next event loop so that past warnings will not be printed
    setImmediate(() => {
      process.off('warning', warningListener);
      process.on('warning', originalListener);
    });
    await new Promise((resolve) => {
      log4js.shutdown(resolve);
    });
    // delete every log file the 11 appenders created
    const filenames = Object.values(config.appenders).map(
      (appender) => appender.filename
    );
    await removeFiles(filenames);
  });
  t.plan(2);
  // next event loop to allow event emitter/listener to happen
  setImmediate(() => {
    t.notOk(warning, 'should not have MaxListenersExceededWarning for SIGHUP');
    t.equal(
      process.listenerCount('SIGHUP') - initialListeners,
      1,
      'should be 1 SIGHUP listener'
    );
    t.end();
  });
});
// On SIGHUP the file appender must close its current stream (close called
// once) and open a fresh one (open called twice in total). streamroller is
// stubbed via sandboxed-module so open/close activity can be counted.
test('file appender SIGHUP', (t) => {
  let closeCalled = 0;
  let openCalled = 0;
  sandbox
    .require('../../lib/appenders/file', {
      requires: {
        streamroller: {
          // minimal stand-in for the real RollingFileStream that only
          // records how often it is opened/closed
          RollingFileStream: class RollingFileStream {
            constructor() {
              openCalled++;
              this.ended = false;
            }
            on() {
              this.dummy = 'easier than turning off lint rule';
            }
            end(cb) {
              this.ended = true;
              closeCalled++;
              cb();
            }
            write() {
              if (this.ended) {
                throw new Error('write after end');
              }
              return true;
            }
          },
        },
      },
    })
    .configure(
      { type: 'file', filename: 'sighup-test-file' },
      {
        basicLayout() {
          return 'whatever';
        },
      }
    );
  process.emit('SIGHUP', 'SIGHUP', 1);
  t.plan(2);
  // give the appender's asynchronous SIGHUP handling time to run
  setTimeout(() => {
    t.equal(openCalled, 2, 'open should be called twice');
    t.equal(closeCalled, 1, 'close should be called once');
    t.end();
  }, osDelay);
});
// The SIGHUP listener added by configure() must be removed again on
// shutdown, i.e. repeated configure/shutdown cycles do not leak
// process-level signal handlers.
test('file appender SIGHUP handler leak', (t) => {
  const log4js = require('../../lib/log4js');
  const initialListeners = process.listenerCount('SIGHUP');
  log4js.configure({
    appenders: {
      file: { type: 'file', filename: 'test.log' },
    },
    categories: { default: { appenders: ['file'], level: 'info' } },
  });
  t.teardown(async () => {
    await removeFiles('test.log');
  });
  t.plan(2);
  t.equal(process.listenerCount('SIGHUP'), initialListeners + 1);
  log4js.shutdown(() => {
    // shutdown must deregister the handler it added
    t.equal(process.listenerCount('SIGHUP'), initialListeners);
    t.end();
  });
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./lib/appenders/dateFile.js | const streams = require('streamroller');
const os = require('os');
const eol = os.EOL;
/**
 * Creates the underlying date-rolling stream and wires up its event handlers.
 * Stream errors are reported to the console; a 'drain' event signals log4js
 * (via the 'log4js:pause' process event) that writing may resume.
 */
function openTheStream(filename, pattern, options) {
  const rollingStream = new streams.DateRollingFileStream(
    filename,
    pattern,
    options
  );
  const onError = (err) => {
    // eslint-disable-next-line no-console
    console.error(
      'log4js.dateFileAppender - Writing to file %s, error happened ',
      filename,
      err
    );
  };
  const onDrain = () => {
    process.emit('log4js:pause', false);
  };
  rollingStream.on('error', onError);
  rollingStream.on('drain', onDrain);
  return rollingStream;
}
/**
 * File appender that rolls files according to a date pattern.
 * @param filename base filename.
 * @param pattern the format that will be added to the end of filename when rolling,
 * also used to check when to roll files - defaults to '.yyyy-MM-dd'
 * @param layout layout function for log messages - defaults to basicLayout
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 * @returns a logging function with a `shutdown(complete)` method attached
 */
function appender(filename, pattern, layout, options, timezoneOffset) {
  // the options for file appender use maxLogSize, but the docs say any file appender
  // options should work for dateFile as well.
  // NOTE: this mutates the caller's options object (streamroller reads maxSize)
  options.maxSize = options.maxLogSize;
  const writer = openTheStream(filename, pattern, options);
  const app = function(logEvent) {
    // drop events once the stream is no longer writable (e.g. after shutdown)
    if (!writer.writable) {
      return;
    }
    if (!writer.write(layout(logEvent, timezoneOffset) + eol, 'utf8')) {
      // write buffer is full - ask log4js to pause until 'drain' fires
      process.emit('log4js:pause', true);
    }
  };
  app.shutdown = function(complete) {
    // flush and close the stream, then invoke the completion callback
    writer.end('', 'utf-8', complete);
  };
  return app;
}
/**
 * Builds a dateFile appender from a configuration object.
 * @param config appender configuration (filename, pattern, layout, mode, ...)
 * @param layouts layout registry supplied by log4js
 * @returns the configured appender function
 */
function configure(config, layouts) {
  // an explicit layout config wins; otherwise fall back to the basic layout
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  if (!config.alwaysIncludePattern) {
    config.alwaysIncludePattern = false;
  }
  // security default (instead of relying on streamroller default)
  config.mode = config.mode || 0o600;
  return appender(
    config.filename,
    config.pattern,
    layout,
    config,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| const streams = require('streamroller');
const os = require('os');
const eol = os.EOL;
/**
 * Creates the underlying date-rolling stream and wires up its event handlers.
 * Stream errors are reported to the console; a 'drain' event signals log4js
 * (via the 'log4js:pause' process event) that writing may resume.
 */
function openTheStream(filename, pattern, options) {
  const rollingStream = new streams.DateRollingFileStream(
    filename,
    pattern,
    options
  );
  const onError = (err) => {
    // eslint-disable-next-line no-console
    console.error(
      'log4js.dateFileAppender - Writing to file %s, error happened ',
      filename,
      err
    );
  };
  const onDrain = () => {
    process.emit('log4js:pause', false);
  };
  rollingStream.on('error', onError);
  rollingStream.on('drain', onDrain);
  return rollingStream;
}
/**
 * File appender that rolls files according to a date pattern.
 * @param filename base filename.
 * @param pattern the format that will be added to the end of filename when rolling,
 * also used to check when to roll files - defaults to '.yyyy-MM-dd'
 * @param layout layout function for log messages - defaults to basicLayout
 * @param options - options to be passed to the underlying stream
 * @param timezoneOffset - optional timezone offset in minutes (default system local)
 * @returns a logging function with a `shutdown(complete)` method attached
 */
function appender(filename, pattern, layout, options, timezoneOffset) {
  // the options for file appender use maxLogSize, but the docs say any file appender
  // options should work for dateFile as well.
  // NOTE: this mutates the caller's options object (streamroller reads maxSize)
  options.maxSize = options.maxLogSize;
  const writer = openTheStream(filename, pattern, options);
  const app = function(logEvent) {
    // drop events once the stream is no longer writable (e.g. after shutdown)
    if (!writer.writable) {
      return;
    }
    if (!writer.write(layout(logEvent, timezoneOffset) + eol, 'utf8')) {
      // write buffer is full - ask log4js to pause until 'drain' fires
      process.emit('log4js:pause', true);
    }
  };
  app.shutdown = function(complete) {
    // flush and close the stream, then invoke the completion callback
    writer.end('', 'utf-8', complete);
  };
  return app;
}
/**
 * Builds a dateFile appender from a configuration object.
 * @param config appender configuration (filename, pattern, layout, mode, ...)
 * @param layouts layout registry supplied by log4js
 * @returns the configured appender function
 */
function configure(config, layouts) {
  // an explicit layout config wins; otherwise fall back to the basic layout
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  if (!config.alwaysIncludePattern) {
    config.alwaysIncludePattern = false;
  }
  // security default (instead of relying on streamroller default)
  config.mode = config.mode || 0o600;
  return appender(
    config.filename,
    config.pattern,
    layout,
    config,
    config.timezoneOffset
  );
}
module.exports.configure = configure;
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/subcategories-test.js | const { test } = require('tap');
const log4js = require('../../lib/log4js');
// Exercises hierarchical ("dotted") category levels: a category inherits its
// level from the nearest configured ancestor, and configured levels apply
// whether loggers are created before or after configuration.
test('subcategories', (batch) => {
  batch.test('loggers created after levels configuration is loaded', (t) => {
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: {
        default: { appenders: ['stdout'], level: 'TRACE' },
        sub1: { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
      },
    });
    const loggers = {
      sub1: log4js.getLogger('sub1'), // WARN
      sub11: log4js.getLogger('sub1.sub11'), // TRACE
      sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
      sub12: log4js.getLogger('sub1.sub12'), // INFO
      sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
      sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
      sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
    };
    t.test('check logger levels', (assert) => {
      assert.equal(loggers.sub1.level, log4js.levels.WARN);
      assert.equal(loggers.sub11.level, log4js.levels.TRACE);
      assert.equal(loggers.sub111.level, log4js.levels.WARN);
      assert.equal(loggers.sub12.level, log4js.levels.INFO);
      assert.equal(loggers.sub13.level, log4js.levels.WARN);
      assert.equal(loggers.sub112.level, log4js.levels.TRACE);
      assert.equal(loggers.sub121.level, log4js.levels.INFO);
      assert.equal(loggers.sub0.level, log4js.levels.TRACE);
      assert.end();
    });
    t.end();
  });
  // loggers obtained BEFORE configure() must pick up the levels afterwards
  batch.test('loggers created before levels configuration is loaded', (t) => {
    // reset to defaults
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: { default: { appenders: ['stdout'], level: 'info' } },
    });
    // these should all get the default log level of INFO
    const loggers = {
      sub1: log4js.getLogger('sub1'), // WARN
      sub11: log4js.getLogger('sub1.sub11'), // TRACE
      sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
      sub12: log4js.getLogger('sub1.sub12'), // INFO
      sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
      sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
      sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
    };
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: {
        default: { appenders: ['stdout'], level: 'TRACE' },
        sub1: { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
      },
    });
    t.test('should still get new levels', (assert) => {
      // can't use .equal because by calling log4js.configure we create new instances
      assert.same(loggers.sub1.level, log4js.levels.WARN);
      assert.same(loggers.sub11.level, log4js.levels.TRACE);
      assert.same(loggers.sub111.level, log4js.levels.WARN);
      assert.same(loggers.sub12.level, log4js.levels.INFO);
      assert.same(loggers.sub13.level, log4js.levels.WARN);
      assert.same(loggers.sub112.level, log4js.levels.TRACE);
      assert.same(loggers.sub121.level, log4js.levels.INFO);
      assert.same(loggers.sub0.level, log4js.levels.TRACE);
      assert.end();
    });
    t.end();
  });
  // level changes must only flow parent -> child, never child -> parent
  batch.test(
    'setting level on subcategories should not set parent level',
    (t) => {
      log4js.configure({
        appenders: { stdout: { type: 'stdout' } },
        categories: {
          default: { appenders: ['stdout'], level: 'trace' },
          parent: { appenders: ['stdout'], level: 'error' },
        },
      });
      const logger = log4js.getLogger('parent');
      const subLogger = log4js.getLogger('parent.child');
      t.test('should inherit parent level', (assert) => {
        assert.same(subLogger.level, log4js.levels.ERROR);
        assert.end();
      });
      t.test(
        'changing child level should not change parent level',
        (assert) => {
          subLogger.level = 'info';
          assert.same(subLogger.level, log4js.levels.INFO);
          assert.same(logger.level, log4js.levels.ERROR);
          assert.end();
        }
      );
      t.end();
    }
  );
  batch.end();
});
| const { test } = require('tap');
const log4js = require('../../lib/log4js');
// Exercises hierarchical ("dotted") category levels: a category inherits its
// level from the nearest configured ancestor, and configured levels apply
// whether loggers are created before or after configuration.
test('subcategories', (batch) => {
  batch.test('loggers created after levels configuration is loaded', (t) => {
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: {
        default: { appenders: ['stdout'], level: 'TRACE' },
        sub1: { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
      },
    });
    const loggers = {
      sub1: log4js.getLogger('sub1'), // WARN
      sub11: log4js.getLogger('sub1.sub11'), // TRACE
      sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
      sub12: log4js.getLogger('sub1.sub12'), // INFO
      sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
      sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
      sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
    };
    t.test('check logger levels', (assert) => {
      assert.equal(loggers.sub1.level, log4js.levels.WARN);
      assert.equal(loggers.sub11.level, log4js.levels.TRACE);
      assert.equal(loggers.sub111.level, log4js.levels.WARN);
      assert.equal(loggers.sub12.level, log4js.levels.INFO);
      assert.equal(loggers.sub13.level, log4js.levels.WARN);
      assert.equal(loggers.sub112.level, log4js.levels.TRACE);
      assert.equal(loggers.sub121.level, log4js.levels.INFO);
      assert.equal(loggers.sub0.level, log4js.levels.TRACE);
      assert.end();
    });
    t.end();
  });
  // loggers obtained BEFORE configure() must pick up the levels afterwards
  batch.test('loggers created before levels configuration is loaded', (t) => {
    // reset to defaults
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: { default: { appenders: ['stdout'], level: 'info' } },
    });
    // these should all get the default log level of INFO
    const loggers = {
      sub1: log4js.getLogger('sub1'), // WARN
      sub11: log4js.getLogger('sub1.sub11'), // TRACE
      sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
      sub12: log4js.getLogger('sub1.sub12'), // INFO
      sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
      sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
      sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
    };
    log4js.configure({
      appenders: { stdout: { type: 'stdout' } },
      categories: {
        default: { appenders: ['stdout'], level: 'TRACE' },
        sub1: { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
      },
    });
    t.test('should still get new levels', (assert) => {
      // can't use .equal because by calling log4js.configure we create new instances
      assert.same(loggers.sub1.level, log4js.levels.WARN);
      assert.same(loggers.sub11.level, log4js.levels.TRACE);
      assert.same(loggers.sub111.level, log4js.levels.WARN);
      assert.same(loggers.sub12.level, log4js.levels.INFO);
      assert.same(loggers.sub13.level, log4js.levels.WARN);
      assert.same(loggers.sub112.level, log4js.levels.TRACE);
      assert.same(loggers.sub121.level, log4js.levels.INFO);
      assert.same(loggers.sub0.level, log4js.levels.TRACE);
      assert.end();
    });
    t.end();
  });
  // level changes must only flow parent -> child, never child -> parent
  batch.test(
    'setting level on subcategories should not set parent level',
    (t) => {
      log4js.configure({
        appenders: { stdout: { type: 'stdout' } },
        categories: {
          default: { appenders: ['stdout'], level: 'trace' },
          parent: { appenders: ['stdout'], level: 'error' },
        },
      });
      const logger = log4js.getLogger('parent');
      const subLogger = log4js.getLogger('parent.child');
      t.test('should inherit parent level', (assert) => {
        assert.same(subLogger.level, log4js.levels.ERROR);
        assert.end();
      });
      t.test(
        'changing child level should not change parent level',
        (assert) => {
          subLogger.level = 'info';
          assert.same(subLogger.level, log4js.levels.INFO);
          assert.same(logger.level, log4js.levels.ERROR);
          assert.end();
        }
      );
      t.end();
    }
  );
  batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/custom-layout.js | const log4js = require('../lib/log4js');
// Example: register a custom 'json' layout that serialises the whole log
// event, then use it with the stdout appender.
log4js.addLayout(
  'json',
  (config) =>
    // the returned function is the actual layout: logEvent -> string
    function(logEvent) {
      return JSON.stringify(logEvent) + config.separator;
    }
);
log4js.configure({
  appenders: {
    out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
  },
  categories: {
    default: { appenders: ['out'], level: 'info' },
  },
});
const logger = log4js.getLogger('json-test');
logger.info('this is just a test');
logger.error('of a custom appender');
logger.warn('that outputs json');
// flush and close all appenders before the example exits
log4js.shutdown(() => {});
| const log4js = require('../lib/log4js');
// Example: register a custom 'json' layout that serialises the whole log
// event, then use it with the stdout appender.
log4js.addLayout(
  'json',
  (config) =>
    // the returned function is the actual layout: logEvent -> string
    function(logEvent) {
      return JSON.stringify(logEvent) + config.separator;
    }
);
log4js.configure({
  appenders: {
    out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
  },
  categories: {
    default: { appenders: ['out'], level: 'info' },
  },
});
const logger = log4js.getLogger('json-test');
logger.info('this is just a test');
logger.error('of a custom appender');
logger.warn('that outputs json');
// flush and close all appenders before the example exits
log4js.shutdown(() => {});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/log-rolling-bug.js | const log4js = require('../lib/log4js');
// Reproduction script for a log-rolling bug: a size-based rolling file
// appender (100 kB per file, 5 compressed backups, keepFileExt) is hammered
// with a message every 10 ms so rollovers happen continuously.
log4js.configure({
  appenders: {
    handler: {
      type: 'file',
      filename: 'logs/handler.log',
      maxLogSize: 100000,
      backups: 5,
      keepFileExt: true,
      compress: true,
    },
  },
  categories: {
    default: { appenders: ['handler'], level: 'debug' },
    handler: { appenders: ['handler'], level: 'debug' },
  },
});
const logsToTest = ['handler'];
const logStartDate = new Date();
const loggers = logsToTest.map((log) => log4js.getLogger(log));
// write out a lot
setInterval(() => {
  loggers.forEach((logger) =>
    logger.info(`TESTING LOGGER!!!!!!${logStartDate}`)
  );
}, 10);
| const log4js = require('../lib/log4js');
// Reproduction script for a log-rolling bug: a size-based rolling file
// appender (100 kB per file, 5 compressed backups, keepFileExt) is hammered
// with a message every 10 ms so rollovers happen continuously.
log4js.configure({
  appenders: {
    handler: {
      type: 'file',
      filename: 'logs/handler.log',
      maxLogSize: 100000,
      backups: 5,
      keepFileExt: true,
      compress: true,
    },
  },
  categories: {
    default: { appenders: ['handler'], level: 'debug' },
    handler: { appenders: ['handler'], level: 'debug' },
  },
});
const logsToTest = ['handler'];
const logStartDate = new Date();
const loggers = logsToTest.map((log) => log4js.getLogger(log));
// write out a lot
setInterval(() => {
  loggers.forEach((logger) =>
    logger.info(`TESTING LOGGER!!!!!!${logStartDate}`)
  );
}, 10);
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/logFaces-appender.js | const log4js = require('../lib/log4js');
/*
 Example: send log events to a logFaces server over UDP.
 logFaces server configured with UDP receiver, using JSON format,
 listening on port 55201 will receive the logs from the appender below.
*/
log4js.configure({
  appenders: {
    logFaces: {
      type: '@log4js-node/logfaces-udp', // (mandatory) appender type
      application: 'MY-NODEJS', // (optional) name of the application (domain)
      remoteHost: 'localhost', // (optional) logFaces server host or IP address
      port: 55201, // (optional) logFaces UDP receiver port (must use JSON format)
      layout: {
        // (optional) the layout to use for messages
        type: 'pattern',
        pattern: '%m',
      },
    },
  },
  categories: { default: { appenders: ['logFaces'], level: 'info' } },
});
const logger = log4js.getLogger('myLogger');
// %s-style placeholders are interpolated by the layout before sending
logger.info('Testing message %s', 'arg1');
| const log4js = require('../lib/log4js');
/*
 Example: send log events to a logFaces server over UDP.
 logFaces server configured with UDP receiver, using JSON format,
 listening on port 55201 will receive the logs from the appender below.
*/
log4js.configure({
  appenders: {
    logFaces: {
      type: '@log4js-node/logfaces-udp', // (mandatory) appender type
      application: 'MY-NODEJS', // (optional) name of the application (domain)
      remoteHost: 'localhost', // (optional) logFaces server host or IP address
      port: 55201, // (optional) logFaces UDP receiver port (must use JSON format)
      layout: {
        // (optional) the layout to use for messages
        type: 'pattern',
        pattern: '%m',
      },
    },
  },
  categories: { default: { appenders: ['logFaces'], level: 'info' } },
});
const logger = log4js.getLogger('myLogger');
// %s-style placeholders are interpolated by the layout before sending
logger.info('Testing message %s', 'arg1');
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/categoryFilter.md | # Category Filter
This is not strictly an appender - it wraps around another appender and stops log events from specific categories from being written to that appender. This can be useful when debugging your application, if you have one component that logs noisily or is irrelevant to your investigation.
## Configuration
- `type` - `"categoryFilter"`
- `exclude` - `string | Array<string>` - the category (or categories if you provide an array of values) that will be excluded from the appender.
- `appender` - `string` - the name of the appender to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
"no-noise": {
type: "categoryFilter",
exclude: "noisy.component",
appender: "everything",
},
},
categories: {
default: { appenders: ["no-noise"], level: "debug" },
},
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger("noisy.component");
logger.debug("I will be logged in all-the-logs.log");
noisyLogger.debug("I will not be logged.");
```
Note that you can achieve the same outcome without using the category filter, like this:
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
"noisy.component": { appenders: ["everything"], level: "off" },
},
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger("noisy.component");
logger.debug("I will be logged in all-the-logs.log");
noisyLogger.debug("I will not be logged.");
```
Category filter becomes useful when you have many categories you want to exclude, passing them as an array.
| # Category Filter
This is not strictly an appender - it wraps around another appender and stops log events from specific categories from being written to that appender. This can be useful when debugging your application, if you have one component that logs noisily or is irrelevant to your investigation.
## Configuration
- `type` - `"categoryFilter"`
- `exclude` - `string | Array<string>` - the category (or categories if you provide an array of values) that will be excluded from the appender.
- `appender` - `string` - the name of the appender to filter.
## Example
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
"no-noise": {
type: "categoryFilter",
exclude: "noisy.component",
appender: "everything",
},
},
categories: {
default: { appenders: ["no-noise"], level: "debug" },
},
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger("noisy.component");
logger.debug("I will be logged in all-the-logs.log");
noisyLogger.debug("I will not be logged.");
```
Note that you can achieve the same outcome without using the category filter, like this:
```javascript
log4js.configure({
appenders: {
everything: { type: "file", filename: "all-the-logs.log" },
},
categories: {
default: { appenders: ["everything"], level: "debug" },
"noisy.component": { appenders: ["everything"], level: "off" },
},
});
const logger = log4js.getLogger();
const noisyLogger = log4js.getLogger("noisy.component");
logger.debug("I will be logged in all-the-logs.log");
noisyLogger.debug("I will not be logged.");
```
Category filter becomes useful when you have many categories you want to exclude, passing them as an array.
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./test/tap/fileSyncAppender-test.js | const { test } = require('tap');
const fs = require('fs');
const path = require('path');
const EOL = require('os').EOL || '\n';
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
/**
 * Best-effort synchronous delete used for test setup/teardown.
 * A failure (typically "file does not exist") is deliberately ignored.
 */
function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (ignoredErr) {
    // nothing to clean up - the file was probably never created
  }
}
test('log4js fileSyncAppender', (batch) => {
batch.test('with default fileSyncAppender settings', (t) => {
const testFile = path.join(__dirname, '/fa-default-sync-test.log');
const logger = log4js.getLogger('default-settings');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should be in the file.');
fs.readFile(testFile, 'utf8', (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
batch.test('with existing file', (t) => {
const testFile = path.join(__dirname, '/fa-existing-file-sync-test.log');
const logger = log4js.getLogger('default-settings');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should be in the file.');
log4js.shutdown(() => {
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should also be in the file.');
fs.readFile(testFile, 'utf8', (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(fileContents, `This should also be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test('should give error if invalid filename', async (t) => {
const file = '';
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: file,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Invalid filename: ${file}`)
);
const dir = `.${path.sep}`;
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: dir,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Filename is a directory: ${dir}`)
);
t.end();
});
batch.test('should give error if invalid maxLogSize', async (t) => {
const maxLogSize = -1;
const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: path.join(
__dirname,
'fa-invalidMaxFileSize-sync-test.log'
),
maxLogSize: -1,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
expectedError
);
t.end();
});
batch.test('with a max file size and no backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log');
const logger = log4js.getLogger('max-file-size');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: 100,
backups: 0,
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is an intermediate log message.');
logger.info('This is the second log message.');
t.test('log file should only contain the second message', (assert) => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.equal(
fileContents.indexOf('This is the first log message.'),
-1
);
assert.end();
});
});
t.test('there should be one test files', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-sync-test.log')
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test('with a max file size in unit mode and no backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-unit-sync-test.log');
const logger = log4js.getLogger('max-file-size-unit');
remove(testFile);
remove(`${testFile}.1`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: '1K',
backups: 0,
layout: { type: 'messagePassThrough' },
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
}
logger.info('This is the second log message.');
t.test('log file should only contain the second message', (assert) => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.notMatch(
fileContents,
'These are the log messages for the first file.'
);
assert.end();
});
});
t.test('there should be one test file', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-unit-sync-test.log')
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test('with a max file size and 2 backups', (t) => {
const testFile = path.join(
__dirname,
'/fa-maxFileSize-with-backups-sync-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: 50,
backups: 2,
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
t.test('the log files', (assert) => {
assert.plan(5);
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-with-backups-sync-test.log')
);
assert.equal(logFiles.length, 3, 'should be 3 files');
assert.same(
logFiles,
[
'fa-maxFileSize-with-backups-sync-test.log',
'fa-maxFileSize-with-backups-sync-test.log.1',
'fa-maxFileSize-with-backups-sync-test.log.2',
],
'should be named in sequence'
);
fs.readFile(
path.join(__dirname, logFiles[0]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the fourth log message.');
}
);
fs.readFile(
path.join(__dirname, logFiles[1]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the third log message.');
}
);
fs.readFile(
path.join(__dirname, logFiles[2]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the second log message.');
}
);
});
});
t.end();
});
batch.test('configure with fileSyncAppender', (t) => {
const testFile = 'tmp-sync-tests.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// this config defines one file appender (to ./tmp-sync-tests.log)
// and sets the log level for "tests" to WARN
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
tests: { appenders: ['sync'], level: 'warn' },
},
});
const logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf('this should not be written to the file'), -1);
t.end();
});
});
batch.test(
'configure with non-existent multi-directory (recursive, nodejs >= 10.12.0)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-recursive.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
}
);
batch.test(
'configure with non-existent multi-directory (non-recursive, nodejs < 10.12.0)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-non-recursive.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
const sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync(dirPath, options) {
return fs.mkdirSync(dirPath, {
...options,
...{ recursive: false },
});
},
},
},
});
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
});
const logger = sandboxedLog4js.getLogger();
logger.info('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
}
);
batch.test(
'configure with non-existent multi-directory (error handling)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-error-handling.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
const errorEPERM = new Error('EPERM');
errorEPERM.code = 'EPERM';
let sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEPERM;
},
},
},
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
}),
errorEPERM
);
const errorEROFS = new Error('EROFS');
errorEROFS.code = 'EROFS';
sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
statSync() {
return {
isDirectory() {
return false;
},
};
},
},
},
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
}),
errorEROFS
);
fs.mkdirSync('tmpA');
fs.mkdirSync('tmpA/tmpB');
fs.mkdirSync('tmpA/tmpB/tmpC');
sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
},
},
});
t.doesNotThrow(() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
})
);
t.end();
}
);
batch.test('test options', (t) => {
const testFile = 'tmp-options-tests.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// using non-standard options
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
flags: 'w',
encoding: 'ascii',
mode: 0o666,
},
},
categories: {
default: { appenders: ['sync'], level: 'info' },
},
});
const logger = log4js.getLogger();
logger.warn('log message');
fs.readFile(testFile, 'ascii', (err, contents) => {
t.match(contents, `log message${EOL}`);
t.end();
});
});
batch.end();
});
| const { test } = require('tap');
const fs = require('fs');
const path = require('path');
const EOL = require('os').EOL || '\n';
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
// doesn't really matter if it failed
}
}
test('log4js fileSyncAppender', (batch) => {
batch.test('with default fileSyncAppender settings', (t) => {
const testFile = path.join(__dirname, '/fa-default-sync-test.log');
const logger = log4js.getLogger('default-settings');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should be in the file.');
fs.readFile(testFile, 'utf8', (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
batch.test('with existing file', (t) => {
const testFile = path.join(__dirname, '/fa-existing-file-sync-test.log');
const logger = log4js.getLogger('default-settings');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should be in the file.');
log4js.shutdown(() => {
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile } },
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This should also be in the file.');
fs.readFile(testFile, 'utf8', (err, fileContents) => {
t.match(fileContents, `This should be in the file.${EOL}`);
t.match(fileContents, `This should also be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
});
});
batch.test('should give error if invalid filename', async (t) => {
const file = '';
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: file,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Invalid filename: ${file}`)
);
const dir = `.${path.sep}`;
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: dir,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
new Error(`Filename is a directory: ${dir}`)
);
t.end();
});
batch.test('should give error if invalid maxLogSize', async (t) => {
const maxLogSize = -1;
const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: 'fileSync',
filename: path.join(
__dirname,
'fa-invalidMaxFileSize-sync-test.log'
),
maxLogSize: -1,
},
},
categories: {
default: { appenders: ['file'], level: 'debug' },
},
}),
expectedError
);
t.end();
});
batch.test('with a max file size and no backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log');
const logger = log4js.getLogger('max-file-size');
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: 100,
backups: 0,
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is an intermediate log message.');
logger.info('This is the second log message.');
t.test('log file should only contain the second message', (assert) => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.equal(
fileContents.indexOf('This is the first log message.'),
-1
);
assert.end();
});
});
t.test('there should be one test files', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-sync-test.log')
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test('with a max file size in unit mode and no backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-unit-sync-test.log');
const logger = log4js.getLogger('max-file-size-unit');
remove(testFile);
remove(`${testFile}.1`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: '1K',
backups: 0,
layout: { type: 'messagePassThrough' },
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
const maxLine = 22; // 1024 max file size / 47 bytes per line
for (let i = 0; i < maxLine; i++) {
logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
}
logger.info('This is the second log message.');
t.test('log file should only contain the second message', (assert) => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
assert.match(fileContents, `This is the second log message.${EOL}`);
assert.notMatch(
fileContents,
'These are the log messages for the first file.'
);
assert.end();
});
});
t.test('there should be one test file', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-unit-sync-test.log')
);
assert.equal(logFiles.length, 1);
assert.end();
});
});
t.end();
});
batch.test('with a max file size and 2 backups', (t) => {
const testFile = path.join(
__dirname,
'/fa-maxFileSize-with-backups-sync-test.log'
);
const logger = log4js.getLogger('max-file-size-backups');
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
t.teardown(() => {
remove(testFile);
remove(`${testFile}.1`);
remove(`${testFile}.2`);
});
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
maxLogSize: 50,
backups: 2,
},
},
categories: { default: { appenders: ['sync'], level: 'debug' } },
});
logger.info('This is the first log message.');
logger.info('This is the second log message.');
logger.info('This is the third log message.');
logger.info('This is the fourth log message.');
t.test('the log files', (assert) => {
assert.plan(5);
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter((file) =>
file.includes('fa-maxFileSize-with-backups-sync-test.log')
);
assert.equal(logFiles.length, 3, 'should be 3 files');
assert.same(
logFiles,
[
'fa-maxFileSize-with-backups-sync-test.log',
'fa-maxFileSize-with-backups-sync-test.log.1',
'fa-maxFileSize-with-backups-sync-test.log.2',
],
'should be named in sequence'
);
fs.readFile(
path.join(__dirname, logFiles[0]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the fourth log message.');
}
);
fs.readFile(
path.join(__dirname, logFiles[1]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the third log message.');
}
);
fs.readFile(
path.join(__dirname, logFiles[2]),
'utf8',
(e, contents) => {
assert.match(contents, 'This is the second log message.');
}
);
});
});
t.end();
});
batch.test('configure with fileSyncAppender', (t) => {
const testFile = 'tmp-sync-tests.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// this config defines one file appender (to ./tmp-sync-tests.log)
// and sets the log level for "tests" to WARN
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
tests: { appenders: ['sync'], level: 'warn' },
},
});
const logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf('this should not be written to the file'), -1);
t.end();
});
});
batch.test(
'configure with non-existent multi-directory (recursive, nodejs >= 10.12.0)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-recursive.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
});
const logger = log4js.getLogger();
logger.info('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
}
);
batch.test(
'configure with non-existent multi-directory (non-recursive, nodejs < 10.12.0)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-non-recursive.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
const sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync(dirPath, options) {
return fs.mkdirSync(dirPath, {
...options,
...{ recursive: false },
});
},
},
},
});
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
});
const logger = sandboxedLog4js.getLogger();
logger.info('this should be written to the file');
fs.readFile(testFile, 'utf8', (err, contents) => {
t.match(contents, `this should be written to the file${EOL}`);
t.end();
});
}
);
batch.test(
'configure with non-existent multi-directory (error handling)',
(t) => {
const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-error-handling.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
try {
fs.rmdirSync('tmpA/tmpB/tmpC');
fs.rmdirSync('tmpA/tmpB');
fs.rmdirSync('tmpA');
} catch (e) {
// doesn't matter
}
});
const errorEPERM = new Error('EPERM');
errorEPERM.code = 'EPERM';
let sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEPERM;
},
},
},
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
}),
errorEPERM
);
const errorEROFS = new Error('EROFS');
errorEROFS.code = 'EROFS';
sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
statSync() {
return {
isDirectory() {
return false;
},
};
},
},
},
});
t.throws(
() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
}),
errorEROFS
);
fs.mkdirSync('tmpA');
fs.mkdirSync('tmpA/tmpB');
fs.mkdirSync('tmpA/tmpB/tmpC');
sandboxedLog4js = sandbox.require('../../lib/log4js', {
requires: {
fs: {
...fs,
mkdirSync() {
throw errorEROFS;
},
},
},
});
t.doesNotThrow(() =>
sandboxedLog4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
},
},
categories: {
default: { appenders: ['sync'], level: 'debug' },
},
})
);
t.end();
}
);
batch.test('test options', (t) => {
const testFile = 'tmp-options-tests.log';
remove(testFile);
t.teardown(() => {
remove(testFile);
});
// using non-standard options
log4js.configure({
appenders: {
sync: {
type: 'fileSync',
filename: testFile,
layout: { type: 'messagePassThrough' },
flags: 'w',
encoding: 'ascii',
mode: 0o666,
},
},
categories: {
default: { appenders: ['sync'], level: 'info' },
},
});
const logger = log4js.getLogger();
logger.warn('log message');
fs.readFile(testFile, 'ascii', (err, contents) => {
t.match(contents, `log message${EOL}`);
t.end();
});
});
batch.end();
});
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./examples/stacktrace.js | const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
'console-appender': {
type: 'console',
layout: {
type: 'pattern',
pattern: '%[[%p]%] - %10.-100f{2} | %7.12l:%7.12o - %[%m%]',
},
},
},
categories: {
default: {
appenders: ['console-appender'],
enableCallStack: true,
level: 'info',
},
},
});
log4js.getLogger().info('This should not cause problems');
| const log4js = require('../lib/log4js');
log4js.configure({
appenders: {
'console-appender': {
type: 'console',
layout: {
type: 'pattern',
pattern: '%[[%p]%] - %10.-100f{2} | %7.12l:%7.12o - %[%m%]',
},
},
},
categories: {
default: {
appenders: ['console-appender'],
enableCallStack: true,
level: 'info',
},
},
});
log4js.getLogger().info('This should not cause problems');
| -1 |
|
log4js-node/log4js-node | 1,317 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order) | lamweili | "2022-09-01T14:04:39Z" | "2022-09-01T14:28:15Z" | be433f26c3169698aa7d8cd8b94584b6f0cb3ceb | accaef82a280bea076cae24a2d3ad2aa08549fb1 | refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order). | ./docs/connect-logger.md | # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function(req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| # Connect / Express Logger
The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`.
```javascript
var log4js = require("log4js");
var express = require("express");
log4js.configure({
appenders: {
console: { type: "console" },
file: { type: "file", filename: "cheese.log" },
},
categories: {
cheese: { appenders: ["file"], level: "info" },
default: { appenders: ["console"], level: "info" },
},
});
var logger = log4js.getLogger("cheese");
var app = express();
app.use(log4js.connectLogger(logger, { level: "info" }));
app.get("/", function(req, res) {
res.send("hello world");
});
app.listen(5000);
```
The log4js.connectLogger supports the passing of an options object that can be used to set the following:
- log level
- log format string or function (the same as the connect/express logger)
- nolog expressions (represented as a string, regexp, array, or function(req, res))
- status code rulesets
For example:
```javascript
app.use(
log4js.connectLogger(logger, {
level: log4js.levels.INFO,
format: ":method :url",
})
);
```
or:
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
// include the Express request ID in the logs
format: (req, res, format) =>
format(
`:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
),
})
);
```
When you request of POST, you want to log the request body parameter like JSON.
The log format function is very useful.
Please use log format function instead "tokens" property for use express's request or response.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "info",
format: (req, res, format) =>
format(`:remote-addr :method :url ${JSON.stringify(req.body)}`),
})
);
```
Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x.
- http responses 3xx, level = WARN
- http responses 4xx & 5xx, level = ERROR
- else, level = INFO
```javascript
app.use(log4js.connectLogger(logger, { level: "auto" }));
```
The levels of returned status codes can be configured via status code rulesets.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
statusRules: [
{ from: 200, to: 299, level: "debug" },
{ codes: [303, 304], level: "info" },
],
})
);
```
The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below.
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: "\\.gif|\\.jpg$",
})
);
```
or
```javascript
app.use(
log4js.connectLogger(logger, {
level: "auto",
format: ":method :url",
nolog: (req, res) => res.statusCode < 400,
})
);
```
The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`.
Application can use it in layouts or appenders.
In application:
```javascript
app.use(log4js.connectLogger(logger, { context: true }));
```
In layout:
```javascript
log4js.addLayout("customLayout", () => {
return (loggingEvent) => {
const res = loggingEvent.context.res;
return util.format(
...loggingEvent.data,
res ? `status: ${res.statusCode}` : ""
);
};
});
```
## Example nolog values
| nolog value | Will Not Log | Will Log |
| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ |
| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif |
| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge |
| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 |
| `/\.(gif\|jpe?g\|png)$/` | as above | as above |
| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` |
| -1 |