Core functionality
commit 0d89f36319
@@ -0,0 +1,3 @@
node_modules
coverage
dist
@@ -0,0 +1,6 @@
{
  "singleQuote": true,
  "semi": true,
  "trailingComma": "all",
  "printWidth": 160
}
@@ -0,0 +1,16 @@
module.exports = {
  "roots": [
    "<rootDir>/src"
  ],
  "setupFilesAfterEnv": ["./src/tests/utils/helpers.ts"],
  "testMatch": [
    "**/__tests__/**/*.+(ts|tsx|js)",
    "**/?(*.)+(spec|test).+(ts|tsx|js)"
  ],
  "testPathIgnorePatterns": [
    ".*/tests/.*.ts"
  ],
  "transform": {
    "^.+\\.(ts|tsx)$": "ts-jest"
  }
}
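Worth spelling out how this configuration ties the pieces together: setupFilesAfterEnv loads ./src/tests/utils/helpers.ts, which attaches the vtoken/nltoken/wstoken/ctoken factories to the global scope, testPathIgnorePatterns keeps that helpers directory from being collected as a test suite, and the ambient declarations for the globals sit in the .d.ts file further down in this diff. As a minimal sketch (not part of the commit; the spec name and import path are illustrative), a spec can then build expected tokens without importing the factories:

import { parseMultiLine } from './parser'; // assumes the spec sits next to parser.ts, like the existing specs

it('parses a single assignment', () => {
  expect(parseMultiLine('HOST=1.2.3.4')).toEqual([vtoken('HOST', '1.2.3.4')]);
});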
File diff suppressed because it is too large
@@ -0,0 +1,29 @@
{
  "name": "dotenv-tool",
  "version": "1.0.0",
  "description": "Tool to read and update .env files",
  "main": "dist/index.js",
  "bin": {
    "dotenv-tool": "./dist/index.js"
  },
  "scripts": {
    "build": "npx tsc",
    "test": "jest",
    "coverage": "jest --coverage"
  },
  "author": "Dominik Dzienia <dominik.dzienia@gmail.com>",
  "license": "MIT",
  "devDependencies": {
    "@types/jest": "^29.5.3",
    "@types/node": "^20.4.1",
    "@types/parsimmon": "^1.10.6",
    "jest": "^29.6.1",
    "ts-jest": "^29.1.1",
    "typescript": "^5.1.6"
  },
  "dependencies": {
    "@bconnorwhite/module": "^2.0.2",
    "commander": "^11.0.0",
    "parsimmon": "^1.18.1"
  }
}
@@ -0,0 +1,37 @@
#! /usr/bin/env node
import commander, { program } from 'commander';
import { getVersionSync } from '@bconnorwhite/module';

function makeReadCommand() {
  const getCmd = new commander.Command('get');
  getCmd
    .description('Returns given variable from env file (if specified)')
    .option('-f, --file <filePath>', 'Input file to parse (if not given, stdio is used)')
    .argument('<key>', 'env variable name a.k.a. key')
    .action((key) => {
      const options = program.opts();
      console.log(options);
      console.log('heat jug', key);
    });

  return getCmd;
}

program
  .name('dotenv-tool')
  .description('Tool to read and update .env files')
  .version(getVersionSync(__dirname) || '1.0.0', '-v, --version')
  .argument('[paramsToSet]', 'space separated list of additional envs to set, in format key=value', '')
  .option('-f, --files <filePaths...>', 'Input file(s)')
  .option('-o, --outputFile <filePath>', 'Output file')
  .option('-m, --modify', 'Modify first input file')
  .action((paramsToSet) => {
    console.log('got', paramsToSet);
    const options = program.opts();
    console.log(options);
  })
  .addCommand(makeReadCommand())

  .parse(process.argv);

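Both action handlers above are still stubs that only log their inputs. A minimal sketch of how the declared options could eventually drive the other modules in this commit (an assumption, not code from the diff: the helper name, the key=value splitting and the fs handling are illustrative):

import { readFileSync, writeFileSync } from 'fs';
import { parseMultiLine, stringifyTokens } from './parser';
import { update } from './manipulation';
import { VariableTuple } from './types';

// Hypothetical helper: apply space-separated "KEY=value" pairs from paramsToSet to one .env file.
function applyParams(inputFile: string, outputFile: string, paramsToSet: string): void {
  const tokens = parseMultiLine(readFileSync(inputFile, 'utf-8'));
  const updates: VariableTuple[] = paramsToSet
    .split(' ')
    .filter((pair) => pair.includes('='))
    .map((pair): VariableTuple => [pair.slice(0, pair.indexOf('=')), pair.slice(pair.indexOf('=') + 1)]);
  // update() falls back to the default config (SMART_APPEND) when no ModifyMode is passed.
  writeFileSync(outputFile, stringifyTokens(update(tokens, updates)));
}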
@@ -0,0 +1,281 @@
import { update, fix_token_list } from './manipulation';
import { stringifyTokens } from './parser';
import { ModifyMode } from './types';

describe('Updating tokens', () => {
  it('Creates empty file', () => {
    expect(stringifyTokens(update([], [], { normalize: true }))).toEqual('\n');
    expect(stringifyTokens(update([], [], { enforceNewLineEnd: false }))).toEqual('');
  });
  it('appends at update with APPEND mode', () => {
    expect(
      stringifyTokens(
        update(
          [],
          [
            ['ALAMA', 'kota'],
            ['KOTMA', 'alę'],
          ],
          { modifyMode: ModifyMode.APPEND },
        ),
      ),
    ).toEqual('ALAMA=kota\nKOTMA=alę\n');

    expect(stringifyTokens(update([], [{ name: 'ALAMA', value: 'kota', comment: ' # ma' }, ['KOTMA', 'alę']]))).toEqual('ALAMA=kota # ma\nKOTMA=alę\n');
    expect(
      stringifyTokens(
        update([vtoken('HOST', '127.0.0.1'), nltoken(), vtoken('PORT', '80'), nltoken(), vtoken('LOGIN', 'root')], [['PASSWORD', 'secret']], {
          modifyMode: ModifyMode.APPEND,
        }),
      ),
    ).toEqual('HOST=127.0.0.1\nPORT=80\nLOGIN=root\nPASSWORD=secret\n');
  });

  it('appends does not append with EXISTING_ONLY mode', () => {
    expect(
      stringifyTokens(
        update(
          [],
          [
            ['ALAMA', 'kota'],
            ['KOTMA', 'alę'],
          ],
          { modifyMode: ModifyMode.EXISTING_ONLY },
        ),
      ),
    ).toEqual('\n');
  });

  it('update existing value', () => {
    expect(stringifyTokens(update([vtoken('ALA', 'ma kota')], [['ALA', 'ma psa']], { modifyMode: ModifyMode.APPEND }))).toEqual('ALA="ma psa"\n');
    expect(
      stringifyTokens(
        update(
          [vtoken('ALA', 'ma kota')],
          [
            ['ALA', 'ma psa'],
            ['ALA', 'ma jednak kota'],
          ],
          { modifyMode: ModifyMode.APPEND },
        ),
      ),
    ).toEqual('ALA="ma jednak kota"\n');

    expect(
      stringifyTokens(
        update(
          [vtoken('HOST', '127.0.0.1'), nltoken(), vtoken('PORT', '80'), nltoken(), vtoken('LOGIN', 'root')],
          [['LOGIN', 'debian'], { name: 'PORT', value: '8080', comment: ' # debug only' }],
          { modifyMode: ModifyMode.APPEND },
        ),
      ),
    ).toEqual('HOST=127.0.0.1\nPORT=8080 # debug only\nLOGIN=debian\n');

    expect(
      stringifyTokens(
        update(
          [
            ctoken('###########'),
            nltoken(),
            ctoken('# Server'),
            nltoken(),
            ctoken('###########'),
            nltoken('\n\n'),
            vtoken('SERVER_HOST', '127.0.0.1'),
            nltoken(),
            vtoken('SERVER_PORT', '80'),
            nltoken(),
            vtoken('SERVER_LOGIN', 'root'),
            nltoken('\n\n'),
            ctoken('###########'),
            nltoken(),
            ctoken('# Client'),
            nltoken(),
            ctoken('###########'),
            nltoken('\n\n'),
            vtoken('CLIENT_LOGIN', 'john'),
          ],
          [['SERVER_LOGIN', 'debian'], { name: 'SERVER_PORT', value: '8080', comment: ' # debug only' }, ['SERVER_PASSWORD', 'secret']],
          { modifyMode: ModifyMode.APPEND },
        ),
      ),
    ).toEqual(
      '###########\n# Server\n###########\n\nSERVER_HOST=127.0.0.1\nSERVER_PORT=8080 # debug only\nSERVER_LOGIN=debian\n\n###########\n# Client\n###########\n\nCLIENT_LOGIN=john\nSERVER_PASSWORD=secret\n',
    );
  });

  it('delete value', () => {
    expect(
      stringifyTokens(
        update([vtoken('HOST', '127.0.0.1'), nltoken(), vtoken('PORT', '80'), nltoken(), vtoken('LOGIN', 'root')], [['PORT', null]], {
          modifyMode: ModifyMode.APPEND,
        }),
      ),
    ).toEqual('HOST=127.0.0.1\nLOGIN=root\n');
    expect(
      stringifyTokens(
        update([vtoken('HOST', '127.0.0.1'), nltoken(), vtoken('PORT', '80'), nltoken('\n\n\n'), vtoken('LOGIN', 'root')], [['PORT', null]], {
          modifyMode: ModifyMode.APPEND,
        }),
      ),
    ).toEqual('HOST=127.0.0.1\n\n\nLOGIN=root\n');
  });

  it('appends empty', () => {
    expect(stringifyTokens(update([], [{ name: 'test', value: undefined }], { modifyMode: ModifyMode.APPEND }))).toEqual('test=\n');
    expect(stringifyTokens(update([], [{ name: 'test', value: '' }], { modifyMode: ModifyMode.APPEND }))).toEqual('test=\n');
  });

  it('appends smart', () => {
    expect(
      stringifyTokens(
        update(
          [
            vtoken('SERVER_HOST', '127.0.0.1'),
            vtoken('SERVER_PORT', '80'),
            vtoken('SERVER_LOGIN', 'root'),
            ctoken('#-------------------------'),
            vtoken('CLIENT_LOGIN', 'john'),
            vtoken('CLIENT_X_AXIS', '12'),
            ctoken('#-------------------------'),
            vtoken('AUTO_RUN', 'true'),
            vtoken('AUTO_CLEAN', 'false'),
          ],
          [
            ['CLIENT_ACCESS', 'limited'],
            ['SERVER_OUTPUT', '/dev/null'],
            ['CLIENT_Z_AXIS', '100'],
            ['ZOOM', '100%'],
            ['AUTO_STOP', 'true'],
            ['QUALITY', '90%'],
            ['AUTO_APPEND', 'true'],
            ['AUTO_ZERO', '000'],
          ],
          { modifyMode: ModifyMode.SMART_APPEND },
        ),
      ),
    ).toEqual(
      'SERVER_HOST=127.0.0.1\nSERVER_OUTPUT=/dev/null\nSERVER_PORT=80\nSERVER_LOGIN=root\n' +
        '#-------------------------\n' +
        'CLIENT_ACCESS=limited\nCLIENT_LOGIN=john\nCLIENT_X_AXIS=12\nCLIENT_Z_AXIS=100\n' +
        '#-------------------------\n' +
        'AUTO_APPEND=true\nAUTO_RUN=true\nAUTO_CLEAN=false\nAUTO_STOP=true\nAUTO_ZERO=000\n' +
        'QUALITY=90%\nZOOM=100%\n',
    );

    expect(
      stringifyTokens(
        update(
          [],
          [
            ['SERVER_HOST', '127.0.0.1'],
            ['SERVER_PORT', '80'],
            ['SERVER_LOGIN', 'root'],
            ['CLIENT_LOGIN', 'john'],
            ['CLIENT_X_AXIS', '12'],
            ['AUTO_RUN', 'true'],
            ['AUTO_CLEAN', 'false'],
            ['CLIENT_ACCESS', 'limited'],
            ['SERVER_OUTPUT', '/dev/null'],
            ['CLIENT_Z_AXIS', '100'],
            ['ZOOM', '100%'],
            ['AUTO_STOP', 'true'],
            ['QUALITY', '90%'],
            ['AUTO_APPEND', 'true'],
            ['AUTO_ZERO', '000'],
          ],
          { modifyMode: ModifyMode.SMART_APPEND },
        ),
      ),
    ).toEqual(
      'AUTO_APPEND=true\nAUTO_CLEAN=false\nAUTO_RUN=true\nAUTO_STOP=true\nAUTO_ZERO=000\n' +
        'CLIENT_ACCESS=limited\nCLIENT_LOGIN=john\nCLIENT_X_AXIS=12\nCLIENT_Z_AXIS=100\n' +
        'QUALITY=90%\n' +
        'SERVER_HOST=127.0.0.1\nSERVER_LOGIN=root\nSERVER_OUTPUT=/dev/null\nSERVER_PORT=80\n' +
        'ZOOM=100%\n',
    );

    expect(
      stringifyTokens(
        update(
          [
            ctoken('###########'),
            nltoken(),
            ctoken('# Server'),
            nltoken(),
            ctoken('###########'),
            nltoken('\n\n'),
            vtoken('SERVER_HOST', '127.0.0.1'),
            nltoken(),
            vtoken('SERVER_PORT', '80'),
            nltoken(),
            vtoken('SERVER_LOGIN', 'root'),
            nltoken('\n\n'),
            ctoken('###########'),
            nltoken(),
            ctoken('# Client'),
            nltoken(),
            ctoken('###########'),
            nltoken('\n\n'),
            vtoken('CLIENT_LOGIN', 'john'),
          ],
          [['SERVER_LOGIN', 'debian'], { name: 'SERVER_PORT', value: '8080', comment: ' # debug only' }, ['SERVER_PASSWORD', 'secret']],
          { modifyMode: ModifyMode.SMART_APPEND },
        ),
      ),
    ).toEqual(
      '###########\n# Server\n###########\n\nSERVER_HOST=127.0.0.1\nSERVER_PASSWORD=secret\nSERVER_PORT=8080 # debug only\nSERVER_LOGIN=debian\n\n###########\n# Client\n###########\n\nCLIENT_LOGIN=john\n',
    );
  });
});

describe('Fixing token list', () => {
  it('merges and interleave tokens with new lines', () => {
    expect(
      fix_token_list([
        ctoken('###########'),
        ctoken('# Server'),
        ctoken('###########'),
        nltoken('\n'),
        nltoken('\n'),
        vtoken('SERVER_HOST', '127.0.0.1'),
        vtoken('SERVER_PORT', '80'),
        vtoken('SERVER_LOGIN', 'root'),
        nltoken('\n\n'),
        ctoken('###########'),
        nltoken(),
        ctoken('# Client'),
        ctoken('###########'),
        nltoken('\n'),
        nltoken('\n'),
        nltoken('\n'),
        nltoken('\n'),
        wstoken('\t'),
        wstoken(' '),
        wstoken('\t'),
        vtoken('CLIENT_LOGIN', 'john'),
      ]),
    ).toEqual([
      ctoken('###########'),
      nltoken(),
      ctoken('# Server'),
      nltoken(),
      ctoken('###########'),
      nltoken('\n\n'),
      vtoken('SERVER_HOST', '127.0.0.1'),
      nltoken(),
      vtoken('SERVER_PORT', '80'),
      nltoken(),
      vtoken('SERVER_LOGIN', 'root'),
      nltoken('\n\n'),
      ctoken('###########'),
      nltoken(),
      ctoken('# Client'),
      nltoken(),
      ctoken('###########'),
      nltoken('\n\n\n\n'),
      wstoken('\t \t'),
      vtoken('CLIENT_LOGIN', 'john'),
    ]);
  });
});
@@ -0,0 +1,175 @@
import { ModifyMode, Variable, VariableToken, TokenType, Token, StringifyConfig, VariableToUpdate } from './types';
import { compareWeighted, getDefaultConfig } from './utils';

export function VariableTokenFrom(variable: Variable | VariableToken): VariableToken {
  return {
    beginning: (variable as VariableToken).beginning || '',
    comment: (variable as Variable).comment || '',
    ending: (variable as VariableToken).ending || '',
    equals: (variable as VariableToken).equals || '=',
    name: (variable as Variable).name,
    quote: (variable as VariableToken).quote || '',
    token: TokenType.VARIABLE,
    value: (variable as Variable).value || '',
  };
}

export function fix_token_list(tokens: Token[]): Token[] {
  let fixed: Token[] = [];
  let current = tokens.shift();

  while (current) {
    const next = tokens.shift();
    if (next) {
      if ((current.token === TokenType.VARIABLE || current.token === TokenType.COMMENT) && next.token !== TokenType.NEWLINE) {
        fixed.push(current);
        fixed.push({ token: TokenType.NEWLINE, value: '\n' });
        current = next;
      } else if (current.token === next.token) {
        current.value = current.value + next.value;
      } else {
        fixed.push(current);
        current = next;
      }
    } else {
      fixed.push(current);
      current = undefined;
    }
  }

  return fixed;
}

export function normalize(tokens: Token[], config?: StringifyConfig): Token[] {
  const cfg = getDefaultConfig(config);
  if (cfg.normalize) {
    tokens = tokens.filter((t) => t.token !== TokenType.WHITESPACE); // drop whitespace-only tokens when normalizing
    tokens = tokens.map((t) => {
      switch (t.token) {
        case TokenType.COMMENT:
          t.value = t.value.trim();
          return t;
        case TokenType.VARIABLE:
          (t as VariableToken).beginning = '';
          (t as VariableToken).ending = '';
          (t as VariableToken).equals = '=';
          (t as VariableToken).comment = ' ' + (t as VariableToken).comment.trim();
          return t;
        case TokenType.NEWLINE:
          t.value = t.value.replace(/\r/g, '');
          return t;
        default:
          return t;
      }
    });
    tokens = fix_token_list(tokens);
    while (tokens.length > 0 && tokens[0].token === TokenType.NEWLINE) {
      tokens.shift();
    }
    tokens = tokens.map((t, i, to) => {
      if (t.token === TokenType.NEWLINE) {
        if (i > 0 && i < to.length - 1) {
          const tPrev = to[i - 1].token;
          const tNext = to[i + 1].token;
          if (tPrev === TokenType.VARIABLE && tNext === TokenType.VARIABLE) {
            if (t.value.length > 2) {
              t.value = '\n\n';
            }
          }
          if ((tPrev === TokenType.VARIABLE && tNext === TokenType.COMMENT) || (tPrev === TokenType.COMMENT && tNext === TokenType.VARIABLE)) {
            if (t.value.length != 2) {
              t.value = '\n\n';
            }
          }
        }
      }
      return t;
    });

    if (tokens.length == 0 || tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
      tokens.push({ token: TokenType.NEWLINE, value: '\n' });
    }
  }

  return tokens;
}

export function update(tokens: Token[], updateWith: VariableToUpdate[], config?: StringifyConfig): Token[] {
  const cfg = getDefaultConfig(config);

  tokens = fix_token_list(tokens);

  updateWith.forEach((u) => {
    const updateVar: Variable | VariableToken = Array.isArray(u) ? { name: u[0], value: u[1] } : u;
    const tokenToUpdate = tokens.findIndex((t) => t.token === TokenType.VARIABLE && (t as VariableToken).name === updateVar.name);
    if (tokenToUpdate > -1) {
      // delete
      if (updateVar.value == null) {
        if (tokenToUpdate < tokens.length - 1 && tokens[tokenToUpdate + 1].token === TokenType.NEWLINE) {
          if (tokens[tokenToUpdate + 1].value.length > 1) {
            const cutPos = tokens[tokenToUpdate + 1].value.indexOf('\n');
            if (cutPos > -1) {
              tokens[tokenToUpdate + 1].value = tokens[tokenToUpdate + 1].value.substring(0, cutPos) + tokens[tokenToUpdate + 1].value.substring(cutPos + 1);
            }
          } else {
            tokens.splice(tokenToUpdate + 1, 1);
          }
        }
        tokens.splice(tokenToUpdate, 1);
      } else {
        (tokens[tokenToUpdate] as VariableToken).value = updateVar.value;
        if (updateVar.comment) {
          (tokens[tokenToUpdate] as VariableToken).comment = updateVar.comment;
        }
      }
    } else {
      switch (cfg.modifyMode) {
        case ModifyMode.SMART_APPEND:
          const indexedNames: [string, number][] = tokens.flatMap((t, i) => (t.token === TokenType.VARIABLE ? [[(t as VariableToken).name, i]] : []));
          let insertBeforePos = -1;
          let insertBeforeDiff = -1;
          let insertAfterPos = -1;
          let insertAfterDiff = 1;
          indexedNames.every((e) => {
            const diff = compareWeighted(updateVar.name, e[0]);
            if (diff < 0 && diff < insertBeforeDiff) {
              insertBeforePos = e[1];
              insertBeforeDiff = diff;
            } else if (diff > 0 && diff >= insertAfterDiff) {
              insertAfterPos = e[1];
              insertAfterDiff = diff;
            }
            return true;
          });

          if (insertBeforePos > -1 || insertAfterPos == -1) {
            tokens.splice(insertBeforePos > -1 ? insertBeforePos : 0, 0, VariableTokenFrom(updateVar), { token: TokenType.NEWLINE, value: '\n' });
          } else {
            tokens.splice(insertAfterPos + 1, 0, { token: TokenType.NEWLINE, value: '\n' }, VariableTokenFrom(updateVar));
          }
          break;

        case ModifyMode.APPEND:
          if (tokens.length > 0 && tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
            tokens.push({ token: TokenType.NEWLINE, value: '\n' });
          }
          tokens.push(VariableTokenFrom(updateVar));
          tokens.push({ token: TokenType.NEWLINE, value: '\n' });
          break;
      }
    }
  });

  if (cfg.normalize) {
    tokens = normalize(tokens, cfg);
  }

  if (cfg.normalize || cfg.enforceNewLineEnd) {
    // end each file with new line
    if (tokens.length == 0 || tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
      tokens.push({ token: TokenType.NEWLINE, value: '\n' });
    }
  }

  return tokens;
}
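To summarize the two append behaviours exercised by the specs above, a worked example traced against update() and the default config in the utils module (illustrative only, not code from the diff):

import { parseMultiLine, stringifyTokens } from './parser';
import { update } from './manipulation';
import { ModifyMode } from './types';

// APPEND places an unknown key at the end of the file:
stringifyTokens(update(parseMultiLine('SERVER_HOST=127.0.0.1\nCLIENT_LOGIN=john\n'), [['SERVER_PORT', '80']], { modifyMode: ModifyMode.APPEND }));
// → 'SERVER_HOST=127.0.0.1\nCLIENT_LOGIN=john\nSERVER_PORT=80\n'

// SMART_APPEND (the default) inserts it next to the existing name with the longest shared prefix:
stringifyTokens(update(parseMultiLine('SERVER_HOST=127.0.0.1\nCLIENT_LOGIN=john\n'), [['SERVER_PORT', '80']], { modifyMode: ModifyMode.SMART_APPEND }));
// → 'SERVER_HOST=127.0.0.1\nSERVER_PORT=80\nCLIENT_LOGIN=john\n'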
@@ -0,0 +1,191 @@
import { parseMultiLine, stringifyTokens } from './parser';

describe('MultiLine format parser', () => {
  it('parse simple prop', () => {
    expect(parseMultiLine('VARNAME=value')).toEqual([vtoken('VARNAME', 'value')]);
    expect(parseMultiLine('VARNAME =value')).toEqual([vtoken('VARNAME', 'value', '', '', '', '', ' =')]);
    expect(parseMultiLine('VARNAME= value')).toEqual([vtoken('VARNAME', 'value', '', '', '', '', '= ')]);
    expect(parseMultiLine(' VARNAME=value')).toEqual([vtoken('VARNAME', 'value', '', '', ' ')]);
    expect(parseMultiLine('VARNAME=value \t ')).toEqual([vtoken('VARNAME', 'value', '', '', '', ' \t ')]);
    expect(parseMultiLine('VARNAME=A:\\DYNA')).toEqual([vtoken('VARNAME', 'A:\\DYNA')]);
    expect(parseMultiLine('VARNAME=quote\'is"ok')).toEqual([vtoken('VARNAME', 'quote\'is"ok')]);
    expect(parseMultiLine('VARNAME=value not escaped')).toMatch(/PARSING FAILED/);
  });

  it('prop have defined names', () => {
    expect(parseMultiLine('abc=value')).toEqual([vtoken('abc', 'value')]);
    expect(parseMultiLine('aBc12_ed=value')).toEqual([vtoken('aBc12_ed', 'value')]);
    expect(parseMultiLine('__=value')).toEqual([vtoken('__', 'value')]);
    expect(parseMultiLine('_FINE=value')).toEqual([vtoken('_FINE', 'value')]);
    expect(parseMultiLine('A=value')).toEqual([vtoken('A', 'value')]);
    expect(parseMultiLine('x00001=value')).toEqual([vtoken('x00001', 'value')]);
    expect(parseMultiLine('007=Bond')).toMatch(/PARSING FAILED/);
    expect(parseMultiLine('kłącze=value')).toMatch(/PARSING FAILED/);
    expect(parseMultiLine('var$name=value')).toMatch(/PARSING FAILED/);
    expect(parseMultiLine('not%alowed=value')).toMatch(/PARSING FAILED/);
    expect(parseMultiLine('#thiswontbevar=value')).toEqual([ctoken('#thiswontbevar=value')]);
    expect(parseMultiLine('and#thisisnoteven=comment')).toMatch(/PARSING FAILED/);
  });

  it('parse double quoted prop', () => {
    expect(parseMultiLine('VARNAME="value"')).toEqual([vtoken('VARNAME', 'value', '"')]);
    expect(parseMultiLine('VARNAME ="value"')).toEqual([vtoken('VARNAME', 'value', '"', '', '', '', ' =')]);
    expect(parseMultiLine('VARNAME= "value"')).toEqual([vtoken('VARNAME', 'value', '"', '', '', '', '= ')]);
    expect(parseMultiLine('VARNAME="value with spaces"')).toEqual([vtoken('VARNAME', 'value with spaces', '"')]);
    expect(parseMultiLine('VARNAME=" value with spaces"')).toEqual([vtoken('VARNAME', ' value with spaces', '"')]);
    expect(parseMultiLine('VARNAME="value with spaces "')).toEqual([vtoken('VARNAME', 'value with spaces ', '"')]);
    expect(parseMultiLine('VARNAME="not closed')).toMatch(/PARSING FAILED/);
  });

  it('escapes in double quot', () => {
    expect(parseMultiLine('VARNAME="test \\" it"')).toEqual([vtoken('VARNAME', 'test " it', '"')]);
    expect(parseMultiLine('VARNAME="\\"\\"\\"\\""')).toEqual([vtoken('VARNAME', '""""', '"')]);
    expect(parseMultiLine('VARNAME="C:\\\\WINDOWS"')).toEqual([vtoken('VARNAME', 'C:\\WINDOWS', '"')]);
    expect(parseMultiLine('VARNAME="\\\\\\\\\\\\"')).toEqual([vtoken('VARNAME', '\\\\\\', '"')]);
    expect(parseMultiLine('VARNAME="beginning\\nnext line"')).toEqual([vtoken('VARNAME', 'beginning\\nnext line', '"')]);
  });

  it('parse single quoted prop', () => {
    expect(parseMultiLine(`VARNAME='value'`)).toEqual([vtoken('VARNAME', 'value', "'")]);
    expect(parseMultiLine(`VARNAME ='value'`)).toEqual([vtoken('VARNAME', 'value', "'", '', '', '', ' =')]);
    expect(parseMultiLine(`VARNAME= 'value'`)).toEqual([vtoken('VARNAME', 'value', "'", '', '', '', '= ')]);
    expect(parseMultiLine(`VARNAME='value with spaces'`)).toEqual([vtoken('VARNAME', 'value with spaces', "'")]);
    expect(parseMultiLine(`VARNAME=' value with spaces'`)).toEqual([vtoken('VARNAME', ' value with spaces', "'")]);
    expect(parseMultiLine(`VARNAME='value with spaces '`)).toEqual([vtoken('VARNAME', 'value with spaces ', "'")]);
    expect(parseMultiLine(`VARNAME='not closed`)).toMatch(/PARSING FAILED/);
  });

  it('escapes in single quot', () => {
    expect(parseMultiLine(`VARNAME='test \\' it'`)).toEqual([vtoken('VARNAME', "test ' it", "'")]);
    expect(parseMultiLine(`VARNAME='\\'\\'\\'\\''`)).toEqual([vtoken('VARNAME', "''''", "'")]);
    expect(parseMultiLine(`VARNAME='C:\\WINDOWS'`)).toEqual([vtoken('VARNAME', 'C:\\WINDOWS', "'")]);
    expect(parseMultiLine(`VARNAME='\\\\\\\\\\\\'`)).toEqual([vtoken('VARNAME', '\\\\\\', "'")]);
    expect(parseMultiLine(`VARNAME='beginning\\nnext line'`)).toEqual([vtoken('VARNAME', 'beginning\\nnext line', "'")]);
  });

  it('supports inline comments', () => {
    expect(parseMultiLine('VARNAME=value #comment')).toEqual([vtoken('VARNAME', 'value', '', ' #comment')]);
    expect(parseMultiLine('VARNAME = value # comment with extra ')).toEqual([vtoken('VARNAME', 'value', '', ' # comment with extra ', '', '', ' = ')]);
    expect(parseMultiLine('VARNAME="value" # fine')).toEqual([vtoken('VARNAME', 'value', '"', ' # fine')]);
    expect(parseMultiLine(`VARNAME='value' # -----`)).toEqual([vtoken('VARNAME', 'value', "'", ' # -----')]);
    expect(parseMultiLine('VARNAME=endswithhash# # plus comment')).toEqual([vtoken('VARNAME', 'endswithhash#', '', ' # plus comment')]);
  });

  it('supports standalone comments', () => {
    expect(parseMultiLine('#')).toEqual([ctoken('#')]);
    expect(parseMultiLine('# ')).toEqual([ctoken('# ')]);
    expect(parseMultiLine(' #')).toEqual([ctoken(' #')]);
    expect(parseMultiLine(' # ')).toEqual([ctoken(' # ')]);
    expect(parseMultiLine('########')).toEqual([ctoken('########')]);
  });

  it('handles empty file', () => {
    expect(parseMultiLine('')).toEqual([]);
    expect(parseMultiLine('\n')).toEqual([nltoken()]);
    expect(parseMultiLine('\n\n\n\n\n')).toEqual([nltoken('\n\n\n\n\n')]);
    expect(parseMultiLine(' \n\n \n')).toEqual([wstoken(' '), nltoken('\n\n'), wstoken(' '), nltoken()]);
  });

  it('parse complex expressions', () => {
    expect(
      parseMultiLine(
        `VAR1=ALA\nVAR2="MA KOTA"\nVAR3 = value3\n\nVaR4="some \\" escape " \nVAR5=val5 #comment \n\n\nVAR6= abc \nVAR_COM=thisis#legit #here comment\n# this is comment \n # only comment \n\n #next comment \n # last\n#no space\n#\n\n \nPUSTA= \nempty2= "" \nVARS='ala'\nVARS2= 'ma kota'\nVARS3='o\\'clock'\nTESTSQ='not " need'\nTESTDQ="not ' need"`,
      ),
    ).toEqual([
      vtoken('VAR1', 'ALA'),
      nltoken(),
      vtoken('VAR2', 'MA KOTA', '"'),
      nltoken(),
      vtoken('VAR3', 'value3', '', '', '', '', ' = '),
      nltoken('\n\n'),
      vtoken('VaR4', 'some " escape ', '"', '', '', ' '),
      nltoken(),
      vtoken('VAR5', 'val5', '', ' #comment '),
      nltoken('\n\n\n'),
      vtoken('VAR6', 'abc', '', '', '', ' ', '= '),
      nltoken(),
      vtoken('VAR_COM', 'thisis#legit', '', ' #here comment'),
      nltoken(),
      ctoken('# this is comment '),
      nltoken(),
      ctoken(' # only comment '),
      nltoken('\n\n'),
      ctoken(' #next comment '),
      nltoken(),
      ctoken(' # last'),
      nltoken(),
      ctoken('#no space'),
      nltoken(),
      ctoken('#'),
      nltoken('\n\n'),
      wstoken(' '),
      nltoken(),
      vtoken('PUSTA', '', '', '', '', '', '= '),
      nltoken(),
      vtoken('empty2', '', '"', '', '', ' ', '= '),
      nltoken(),
      vtoken('VARS', 'ala', "'"),
      nltoken(),
      vtoken('VARS2', 'ma kota', "'", '', '', '', '= '),
      nltoken(),
      vtoken('VARS3', "o'clock", "'"),
      nltoken(),
      vtoken('TESTSQ', 'not " need', "'"),
      nltoken(),
      vtoken('TESTDQ', "not ' need", '"'),
    ]);
  });
});

describe('MultiLine format stringifier', () => {
  it('supports simple props, multiline', () => {
    expect(stringifyTokens([vtoken('VAR1', 'abcd1234')])).toEqual('VAR1=abcd1234');
    expect(stringifyTokens([vtoken('VAR1', 'Ala'), nltoken(), vtoken('VAR2', 'Ma'), nltoken(), vtoken('VAR3', 'Kota')])).toEqual(
      'VAR1=Ala\nVAR2=Ma\nVAR3=Kota',
    );
  });

  it('enforces escaping', () => {
    expect(stringifyTokens([vtoken('VAR1', 'Ala ma kota')])).toEqual('VAR1="Ala ma kota"');
    expect(stringifyTokens([vtoken('VAR1', ' value')])).toEqual('VAR1=" value"');
    expect(stringifyTokens([vtoken('VAR1', 'value ')])).toEqual('VAR1="value "');
    expect(stringifyTokens([vtoken('VAR1', 'with\ttab')])).toEqual('VAR1="with\\ttab"');
    expect(stringifyTokens([vtoken('VAR1', 'multi\nline')])).toEqual('VAR1="multi\\nline"');
    expect(stringifyTokens([vtoken('VAR1', 'other\rspecial\fchars')])).toEqual('VAR1="other\\rspecial\\fchars"');
  });

  it('is stable', () => {
    const source = `VAR1=ALA\nVAR2="MA KOTA"\nVAR3 = value3\n\nVaR4="some \\" escape " \nVAR5=val5 #comment \n\n\nVAR6= abc \nVAR_COM=thisis#legit #here comment\n# this is comment \n # only comment \n\n #next comment \n # last\n#no space\n#\n\n \nPUSTA= \nempty2= "" \nVARS='ala'\nVARS2= 'ma kota'\nVARS3='o\\'clock'\nTESTSQ='not " need'\nTESTDQ="not ' need"`;
    expect(stringifyTokens(parseMultiLine(source))).toEqual(source);
  });

  it('fixes invalid fields', () => {
    expect(stringifyTokens([vtoken('VAR', 'value', '', 'without hash')])).toEqual('VAR=value # without hash');
    expect(stringifyTokens([vtoken('VAR', 'value', '', '# without space')])).toEqual('VAR=value # without space');
    expect(stringifyTokens([vtoken('VAR', 'value', '', 'junk # with junk')])).toEqual('VAR=value # with junk');
    expect(stringifyTokens([vtoken('VAR', 'value', '', '', '', '', ' zło ')])).toEqual('VAR=value');
    expect(stringifyTokens([vtoken('VAR', 'value', '', '', '', '', ' junk = junktoo')])).toEqual('VAR=value');
    expect(stringifyTokens([vtoken('VAR', 'value', 'a')])).toEqual('VAR=value');
    expect(stringifyTokens([vtoken('VAR', 'value', '', '', ' zzz ')])).toEqual('VAR=value');
    expect(stringifyTokens([vtoken('VAR', 'value', '', '', '', ' xxx ')])).toEqual('VAR=value');
  });

  it('quotes values when needed', () => {
    expect(stringifyTokens([vtoken('VAR', 'value with spaces', '')])).toEqual('VAR="value with spaces"');
    expect(stringifyTokens([vtoken('VAR', '"need quotes', '')])).toEqual('VAR="\\"need quotes"');
    expect(stringifyTokens([vtoken('VAR', "'need quotes", '')])).toEqual('VAR="\'need quotes"');
  });

  it('supports all tokens', () => {
    expect(stringifyTokens([nltoken('\n\n')])).toEqual('\n\n');
    expect(stringifyTokens([nltoken(' \njunk \r')])).toEqual('\n\r');
    expect(stringifyTokens([wstoken(' ')])).toEqual(' ');
    expect(stringifyTokens([wstoken()])).toEqual('');
    expect(stringifyTokens([wstoken('\t \tjunk\t \t')])).toEqual('\t \t\t \t');
    expect(stringifyTokens([ctoken('junk # comment')])).toEqual('# comment');
    expect(stringifyTokens([vtoken('VAR', 'value'), nltoken('\n\n'), ctoken('awesome'), nltoken(), wstoken(' '), nltoken()])).toEqual(
      'VAR=value\n\n# awesome\n \n',
    );
  });
});

@@ -0,0 +1,146 @@
import Parsimmon from 'parsimmon';
import { Token, StringifyConfig, TokenType, VariableToken } from './types';
import { getDefaultConfig } from './utils';

const varName = Parsimmon.regexp(/[a-zA-Z_]+[a-zA-Z0-9_]*/).desc('environment variable name');
const equals = Parsimmon.string('=').desc('equals sign');
const normalSpaceOpt = Parsimmon.regexp(/[^\S\r\n]*/);
const doubleQuote = Parsimmon.string('"');
const singleQuote = Parsimmon.string("'");
const escapedChar = Parsimmon.regexp(/\\./);
const nonEscapedValuePartSQ = Parsimmon.regexp(/[^'\\]+/);
const escapedStrSQ = nonEscapedValuePartSQ.or(escapedChar.map((v: string) => v.replace("\\'", "'").replace('\\\\', '\\'))).many();
const nonEscapedValuePartDQ = Parsimmon.regexp(/[^"\\]+/);
const escapedStrDQ = nonEscapedValuePartDQ.or(escapedChar.map((v: string) => v.replace('\\"', '"').replace('\\\\', '\\'))).many();

// To allow spaces in simple values: [^"'\s][^\s]*([^\S\r\n]+\S+)* -- but that won't work in bash, so unescaped spaces are not allowed!
const simpleValueWithoutSpacesAndQuotes = Parsimmon.regexp(/[^"'\s][^\s]*/);

const valueWithSpacesDoubleQuoted = Parsimmon.seqObj<string, any>(['quote', doubleQuote], ['value', escapedStrDQ.map((v) => v.join(''))], doubleQuote);
const valueWithSpacesSingleQuoted = Parsimmon.seqObj<string, any>(['quote', singleQuote], ['value', escapedStrSQ.map((v) => v.join(''))], singleQuote);
const value = Parsimmon.alt(
  valueWithSpacesSingleQuoted,
  valueWithSpacesDoubleQuoted,
  simpleValueWithoutSpacesAndQuotes.map((v) => ({ quote: '', value: v })),
).desc('property value');
const commentOpt = Parsimmon.regexp(/[^\S\r\n]+#[^\r\n]*/);
const comment = Parsimmon.regexp(/[\n\r]*[^\S\r\n]*#[^\r\n]*/);
const newlineOpt = Parsimmon.regexp(/[\n\r]*/);

const envFile = Parsimmon.seqObj<string | any, any>(
  ['nl_in_front', newlineOpt.desc('new line')],
  [
    'content',
    Parsimmon.alt(
      comment.desc('comment').map((c) => ({ token: 'comment', value: c })),

      Parsimmon.seqObj<string | any, any>(
        ['beginning', normalSpaceOpt],
        ['name', varName],
        ['equals', Parsimmon.seq(normalSpaceOpt, equals, normalSpaceOpt).map((v) => v.join(''))],
        ['value', value.fallback('')],
        [
          'ending',
          Parsimmon.alt(
            commentOpt.desc('comment').map((v) => ({ type: 'comment', value: v })),
            normalSpaceOpt.desc('white space').map((s) => ({ type: 'whitespace', value: s })),
          ).desc('comment or whitespace'),
        ],
      ).map((o) => {
        o.quote = o.value.quote || '';
        o.value = o.value.value || '';
        o.comment = o.ending.type === 'comment' ? o.ending.value : '';
        o.ending = o.ending.type === 'whitespace' ? o.ending.value : '';
        o.token = 'variable';
        return o;
      }),
      normalSpaceOpt.map((w) => ({ token: 'whitespace', value: w })),
    ).desc('variable'),
  ],
).sepBy(Parsimmon.string('\n').desc('new line'));

export function parseMultiLine(input: string): Token[] {
  try {
    return envFile
      .tryParse(input)
      .map((o, i) => {
        if (i > 0) {
          o.nl_in_front = '\n' + o.nl_in_front;
        }
        return o;
      })
      .flatMap((o) => [{ token: 'newline', value: o.nl_in_front }, { ...o.content }])
      .filter((t) => !((t.token === 'newline' || t.token === 'whitespace') && t.value === ''));
  } catch (err: any) {
    return err.message;
  }
}

export function stringifyTokens(tokens: Token[], config?: StringifyConfig): string {
  const cfg = getDefaultConfig(config);
  return tokens
    .map((t) => {
      switch (t.token) {
        case TokenType.COMMENT:
          if (t.value.length > 0) {
            if (!t.value.match(/#/)) {
              t.value = '# ' + t.value;
            } else if (!t.value.match(/^[^\S\r\n]*#/)) {
              t.value = t.value.substring(t.value.indexOf('#'));
            }
          }
          return t.value;

        case TokenType.NEWLINE:
          return t.value.replace(/[^\n\r]/g, '');

        case TokenType.WHITESPACE:
          return t.value ? t.value.replace(/[^\s]/g, '') : '';

        case TokenType.VARIABLE:
          const vt = t as VariableToken;
          if (vt.equals.length > 0 && !vt.equals.match(/^[^\S\r\n]*=[^\S\r\n]*$/)) {
            vt.equals = '=';
          }
          if (vt.beginning.length > 0 && !vt.beginning.match(/^[^\S\r\n]*$/)) {
            vt.beginning = '';
          }
          if (vt.ending.length > 0 && !vt.ending.match(/^[^\S\r\n]*$/)) {
            vt.ending = '';
          }
          if (vt.quote.length > 0 && !vt.quote.match(/^["']$/)) {
            vt.quote = '';
          }
          if (vt.comment.length > 0) {
            if (!vt.comment.match(/#/)) {
              vt.comment = ' # ' + vt.comment;
            } else if (!vt.comment.match(/^[^\S\r\n]+#/)) {
              vt.comment = ' ' + vt.comment.substring(vt.comment.indexOf('#'));
            }
          }
          if (vt.quote === '') {
            if (vt.value.length > 0 && (vt.value.match(/^["'\s]/m) || vt.value.match(/\s/))) {
              vt.quote = '"';
            }
          }
          if (vt.value && vt.value.length > 0) {
            switch (vt.quote) {
              case '"':
                vt.value = vt.value
                  .replace('\\', '\\\\')
                  .replace('\n', '\\n')
                  .replace('\f', '\\f')
                  .replace('\t', '\\t')
                  .replace('\r', '\\r')
                  .replace('"', '\\"');
                break;
              case "'":
                vt.value = vt.value.replace('\\', '\\\\').replace("'", "\\'");
                break;
            }
          }
          return `${vt.beginning}${vt.name}${vt.equals}${vt.quote}${vt.value || ''}${vt.quote}${vt.comment}${vt.ending}`;
      }
    })
    .join('');
}
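To make the token stream concrete, here is what the parser yields for a small two-line input, written with the vtoken/nltoken/ctoken notation the test suites use (a sketch for illustration, not code from the diff):

import { parseMultiLine, stringifyTokens } from './parser';

// 'HOST=127.0.0.1\n# local only' becomes a variable token, a newline token and a comment token:
// [vtoken('HOST', '127.0.0.1'), nltoken(), ctoken('# local only')]
const tokens = parseMultiLine('HOST=127.0.0.1\n# local only');

// stringifyTokens() is the inverse and round-trips unchanged input (see the "is stable" spec above).
console.log(stringifyTokens(tokens) === 'HOST=127.0.0.1\n# local only'); // true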
@@ -0,0 +1,8 @@
import { Token } from '../../types';

declare global {
  function vtoken(name: string, value: string, quote?: string, comment?: string, beginning?: string, ending?: string, equals?: string): Token;
  function nltoken(value?: string): Token;
  function wstoken(value?: string): Token;
  function ctoken(value?: string): Token;
}
@@ -0,0 +1,43 @@
import { Token, TokenType } from '../../types';

global.vtoken = (
  name: string,
  value: string,
  quote: string = '',
  comment: string = '',
  beginning: string = '',
  ending: string = '',
  equals: string = '=',
): Token => {
  return {
    beginning,
    comment,
    ending,
    equals,
    name,
    quote,
    token: TokenType.VARIABLE,
    value,
  };
};

global.nltoken = (value: string = '\n'): Token => {
  return {
    token: TokenType.NEWLINE,
    value,
  };
};

global.wstoken = (value: string = ''): Token => {
  return {
    token: TokenType.WHITESPACE,
    value,
  };
};

global.ctoken = (value: string = ''): Token => {
  return {
    token: TokenType.COMMENT,
    value,
  };
};
@@ -0,0 +1,43 @@
export enum TokenType {
  VARIABLE = 'variable',
  NEWLINE = 'newline',
  WHITESPACE = 'whitespace',
  COMMENT = 'comment',
}

export type SimpleToken = {
  token: TokenType;
  value: string;
};

export type VariableToken = {
  name: string;
  quote: string;
  comment: string;
  beginning: string;
  ending: string;
  equals: string;
} & SimpleToken;

export type Token = SimpleToken | VariableToken;

export enum ModifyMode {
  EXISTING_ONLY = 'existing-only',
  APPEND = 'append',
  SMART_APPEND = 'smart-append',
}

export type Variable = {
  name: string;
  value: string | undefined | null;
  comment?: string;
};

export type StringifyConfig = {
  normalize?: boolean;
  enforceNewLineEnd?: boolean;
  modifyMode?: ModifyMode;
};

export type VariableTuple = [string, string | null];
export type VariableToUpdate = Variable | VariableToken | VariableTuple;
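Since update() in the manipulation module takes VariableToUpdate[], the three shapes above can be mixed freely in one call. A minimal sketch (values borrowed from the specs earlier in this diff; the snippet itself is not part of the commit):

import { VariableToUpdate } from './types';

const updates: VariableToUpdate[] = [
  ['HOST', '127.0.0.1'],                                     // VariableTuple: set (or append) a value
  { name: 'PORT', value: '8080', comment: ' # debug only' }, // Variable: new value plus an inline comment
  ['LOGIN', null],                                           // a null value marks the variable for deletion
];
// update(tokens, updates) applies them in order, appending unknown names per the configured ModifyMode.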
@@ -0,0 +1,20 @@
import { compareWeighted } from './utils';

describe('Test utils', () => {
  it('works', () => {
    expect(compareWeighted('a','b')).toEqual(-1);
    expect(compareWeighted('a','a')).toEqual(0);
    expect(compareWeighted('b','a')).toEqual(1);
    expect(compareWeighted('a','')).toEqual(1);
    expect(compareWeighted('','a')).toEqual(-1);
    expect(compareWeighted('','')).toEqual(0);
    expect(compareWeighted('LOGIN_C','ANOTHER_Z')).toEqual(1);
    expect(compareWeighted('LOGIN_C','NEXT_Z')).toEqual(-1);
    expect(compareWeighted('LOGIN_C','LOGIN_A')).toEqual(7);
    expect(compareWeighted('LOGIN_C','LOGIN_C')).toEqual(0);
    expect(compareWeighted('LOGIN_C','LOGIN_')).toEqual(7);
    expect(compareWeighted('LOGIN_C','LOGIN_CA')).toEqual(-8);
    expect(compareWeighted('LOGIN_C','LOGIN_Z')).toEqual(-7);
  })

})
@@ -0,0 +1,19 @@
import { ModifyMode, StringifyConfig } from './types';

const DEFAULT_CONFIG = { modifyMode: ModifyMode.SMART_APPEND, normalize: false, enforceNewLineEnd: true };

export function getDefaultConfig(customConfig?: StringifyConfig): StringifyConfig {
  return { ...DEFAULT_CONFIG, ...(customConfig || {})};
}

function findFirstDiffPos(a: string, b: string) {
  var i = 0;
  if (a === b) return -1;
  while (a[i] === b[i]) i++;
  return i;
}

export function compareWeighted(a: string, b: string): number {
  const diffPos = findFirstDiffPos(a, b);
  return a.localeCompare(b) * (diffPos > -1 ? diffPos + 1 : 1);
}
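A note on compareWeighted, illustrated with values that follow from the code above: the sign comes from localeCompare, and the magnitude is the index of the first differing character plus one, so names that share a longer prefix compare with a larger absolute weight. The SMART_APPEND branch in the manipulation module relies on this to place a new variable next to the existing name it resembles most:

compareWeighted('SERVER_PASSWORD', 'SERVER_LOGIN'); //  8: first difference at index 7 ('P' vs 'L')
compareWeighted('SERVER_PASSWORD', 'CLIENT_LOGIN'); //  1: strings differ already at index 0
compareWeighted('LOGIN_C', 'LOGIN_CA');             // -8: the shorter name sorts first (matches the spec above)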
@@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "rootDir": "src",
    "outDir": "dist",
    "strict": true,
    "target": "es6",
    "module": "commonjs",
    "sourceMap": true,
    "esModuleInterop": true,
    "moduleResolution": "node"
  },
  "include": ["./src/*.ts", "./src/tests/utils/**.d.ts"]
}