Version 1.0.0

Commit 20ebc2e23c (parent 0d89f36319) by Dominik Dzienia, 2 years ago, on branch master, tagged v1.0.0

@@ -0,0 +1 @@
ALA=makota

.gitignore vendored

@@ -1,3 +1,4 @@
 node_modules
 coverage
 dist
+.env

@@ -0,0 +1 @@
registry=https://npm.dzienia.pl/

@@ -2,5 +2,6 @@
   "singleQuote": true,
   "semi": true,
   "trailingComma": "all",
-  "printWidth": 160
+  "printWidth": 160,
+  "endOfLine": "lf"
 }

@@ -0,0 +1,62 @@
`dotenv-tool` is a command-line tool to read, manipulate, and update .env files.
## Setup
```bash
npm install -g dotenv-tool --registry https://npm.dzienia.pl
```
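A quick sanity check after installing is to print the version (the `-v` flag is listed in the help output below):
```bash
$ dotenv-tool -v
```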
## Command Line
If no input or output file is specified, `dotenv-tool` reads from `stdin` and writes to `stdout`:
```bash
$ printf "VAR1=value1\nVAR2=value2" | dotenv-tool get VAR1 > result.txt
$ cat result.txt
value1
```
### Modify files
```bash
$ dotenv-tool -h
```
```
Usage: dotenv-tool [options] [command] [paramsToSet...]
Tool to read and update .env files
Arguments:
paramsToSet space separated list of additional envs to set, in format key=value (default: "")
Options:
-v, --version output the version number
-i, --files <filePaths...> Input file(s)
-o, --outputFile <filePath> Output file
-m, --modify Modify first input file
-s, --silent Mute all messages and errors
-h, --help display help for command
Commands:
get [options] <key> Returns given variable from env file (if specified)
```
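A few typical invocations built from the options above (the file names are only examples; when `-i` is used, `--` separates the key=value pairs from the variadic file list):
```bash
# Override one variable while piping an env file through the tool
$ printf "SERVER_PORT=80\nSERVER_HOST=localhost" | dotenv-tool SERVER_PORT=8080

# Merge several env files and write the combined result to a new file
$ dotenv-tool -i base.env local.env -o merged.env

# Update (or append) a variable directly in the first input file
$ dotenv-tool -m -i .env -- SERVER_PORT=8080
```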
### Read a property from a file
```bash
$ dotenv-tool get -h
```
```
Usage: dotenv-tool get [options] <key>
Returns given variable from env file (if specified)
Arguments:
key env variable name a.k.a. key
Options:
-f, --file <filePath> Input file to parse (if not given, stdio is used)
-h, --help display help for command
```
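For example, assuming a `.env` file that contains `SERVER_PORT=8080`:
```bash
$ dotenv-tool get -f .env SERVER_PORT
8080
```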

@@ -7,10 +7,16 @@ module.exports = {
     "**/__tests__/**/*.+(ts|tsx|js)",
     "**/?(*.)+(spec|test).+(ts|tsx|js)"
   ],
-  "testPathIgnorePatterns" : [
+  "testPathIgnorePatterns": [
     ".*/tests/.*.ts"
   ],
   "transform": {
     "^.+\\.(ts|tsx)$": "ts-jest"
-  }
+  },
+  "coveragePathIgnorePatterns": [
+    "node_modules",
+    ".module.ts",
+    "<rootDir>/src/binutils.ts",
+    ".mock.ts"
+  ],
 }

package-lock.json generated

@@ -1,29 +1,33 @@
{ {
"name": "dotenv-tool", "name": "dotenv-tool",
"version": "1.0.0", "version": "1.0.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "dotenv-tool", "name": "dotenv-tool",
"version": "1.0.0", "version": "1.0.1",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@bconnorwhite/module": "^2.0.2",
"commander": "^11.0.0", "commander": "^11.0.0",
"commander-version": "^3.0.0",
"parsimmon": "^1.18.1" "parsimmon": "^1.18.1"
}, },
"bin": { "bin": {
"dotenv-tool": "dist/index.js" "dotenv-tool": "dist/cjs/src/bin.js"
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "^29.5.3", "@types/jest": "^29.5.3",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.4.1", "@types/node": "^20.4.1",
"@types/parsimmon": "^1.10.6", "@types/parsimmon": "^1.10.6",
"jest": "^29.6.1", "jest": "^29.6.1",
"jest-mock-process": "^2.0.0",
"mock-fs": "^5.2.0",
"ts-jest": "^29.1.1", "ts-jest": "^29.1.1",
"typescript": "^5.1.6" "typescript": "^5.1.6"
},
"engines": {
"node": ">=14"
} }
}, },
"node_modules/@ampproject/remapping": { "node_modules/@ampproject/remapping": {
@@ -598,16 +602,6 @@
"integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
"dev": true "dev": true
}, },
"node_modules/@bconnorwhite/module": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@bconnorwhite/module/-/module-2.0.2.tgz",
"integrity": "sha512-ck1me5WMgZKp06gnJrVKEkytpehTTQbvsAMbF1nGPeHri/AZNhj87++PSE2LOxmZqM0EtGMaqeLdx7Lw7SUnTA==",
"dependencies": {
"find-up": "^5.0.0",
"read-json-safe": "^1.0.5",
"types-pkg-json": "^1.1.0"
}
},
"node_modules/@istanbuljs/load-nyc-config": { "node_modules/@istanbuljs/load-nyc-config": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
@@ -1134,6 +1128,15 @@
"pretty-format": "^29.0.0" "pretty-format": "^29.0.0"
} }
}, },
"node_modules/@types/mock-fs": {
"version": "4.13.1",
"resolved": "https://registry.npmjs.org/@types/mock-fs/-/mock-fs-4.13.1.tgz",
"integrity": "sha512-m6nFAJ3lBSnqbvDZioawRvpLXSaPyn52Srf7OfzjubYbYX8MTUdIgDxQl0wEapm4m/pNYSd9TXocpQ0TvZFlYA==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "20.4.1", "version": "20.4.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.1.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.1.tgz",
@@ -1552,23 +1555,6 @@
"node": ">=16" "node": ">=16"
} }
}, },
"node_modules/commander-version": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/commander-version/-/commander-version-3.0.0.tgz",
"integrity": "sha512-7czv7cyvaNIMficskfWAKDK0sMo9LJEUQSQWXsnULtq8I0F2BJJAb+kdwrDk/iu8pfU1u6M1n33KV3jL8HLOmg==",
"dependencies": {
"@bconnorwhite/module": "^2.0.2",
"commander": "^7.2.0"
}
},
"node_modules/commander-version/node_modules/commander": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
"integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
"engines": {
"node": ">= 10"
}
},
"node_modules/concat-map": { "node_modules/concat-map": {
"version": "0.0.1", "version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -1785,21 +1771,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/find-up": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dependencies": {
"locate-path": "^6.0.0",
"path-exists": "^4.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/fs.realpath": { "node_modules/fs.realpath": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -2414,6 +2385,15 @@
"node": "^14.15.0 || ^16.10.0 || >=18.0.0" "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
} }
}, },
"node_modules/jest-mock-process": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/jest-mock-process/-/jest-mock-process-2.0.0.tgz",
"integrity": "sha512-bybzszPfvrYhplymvUNFc130ryvjSCW1JSCrLA0LiV0Sv9TrI+cz90n3UYUPoT2nhNL6c6IV9LxUSFJF9L9tHQ==",
"dev": true,
"peerDependencies": {
"jest": ">=23.4"
}
},
"node_modules/jest-pnp-resolver": { "node_modules/jest-pnp-resolver": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz",
@@ -2771,20 +2751,6 @@
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
"dev": true "dev": true
}, },
"node_modules/locate-path": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dependencies": {
"p-locate": "^5.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/lodash.memoize": { "node_modules/lodash.memoize": {
"version": "4.1.2", "version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@@ -2870,6 +2836,15 @@
"node": "*" "node": "*"
} }
}, },
"node_modules/mock-fs": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz",
"integrity": "sha512-2dF2R6YMSZbpip1V1WHKGLNjr/k48uQClqMVb5H3MOvwc9qhYis3/IWbj02qIg/Y8MDXKFF4c5v0rxx2o6xTZw==",
"dev": true,
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/ms": { "node_modules/ms": {
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
@@ -2943,6 +2918,7 @@
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"dev": true,
"dependencies": { "dependencies": {
"yocto-queue": "^0.1.0" "yocto-queue": "^0.1.0"
}, },
@@ -2953,20 +2929,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/p-locate": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dependencies": {
"p-limit": "^3.0.2"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/p-try": { "node_modules/p-try": {
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
@@ -2994,14 +2956,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/parse-json-object": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/parse-json-object/-/parse-json-object-1.1.0.tgz",
"integrity": "sha512-4w5s6uJY1tW9REY8UwUOyaZKSKsrbQrMEzlV/Le/g5t4iMWuuyK83pZZ0OZimSOL9iyv2ORvRSgz71Ekd7iD3g==",
"dependencies": {
"types-json": "^1.0.6"
}
},
"node_modules/parsimmon": { "node_modules/parsimmon": {
"version": "1.18.1", "version": "1.18.1",
"resolved": "https://registry.npmjs.org/parsimmon/-/parsimmon-1.18.1.tgz", "resolved": "https://registry.npmjs.org/parsimmon/-/parsimmon-1.18.1.tgz",
@@ -3011,6 +2965,7 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -3191,20 +3146,6 @@
"integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==",
"dev": true "dev": true
}, },
"node_modules/read-file-safe": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/read-file-safe/-/read-file-safe-1.0.10.tgz",
"integrity": "sha512-qW25fd2uMX3dV6Ui/R0jYK1MhTpjx8FO/VHaHTXzwWsGnkNwLRcqYfCXd9qDM+NZ273DPUvP2RaimYuLSu1K/g=="
},
"node_modules/read-json-safe": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/read-json-safe/-/read-json-safe-1.0.5.tgz",
"integrity": "sha512-SJyNY/U9+vW35FPus22Qvv1oilnR7PCfN2E70uKQEGaJS313A5/cz9Yhv7ZtWzZ+XIwrtEPxXf10BOyYemHehA==",
"dependencies": {
"parse-json-object": "^1.0.5",
"read-file-safe": "^1.0.5"
}
},
"node_modules/require-directory": { "node_modules/require-directory": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
@@ -3580,28 +3521,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/types-eslintrc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/types-eslintrc/-/types-eslintrc-1.0.3.tgz",
"integrity": "sha512-zKTR6aKHEudQpl+JoZjS3qh0B5IzSpQK/BCpYBECujcnKtqL87DJJ1sJKe5B8k/y8/UJ5sukq42QDvlaJyCO2w==",
"dependencies": {
"types-json": "^1.2.2"
}
},
"node_modules/types-json": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/types-json/-/types-json-1.2.2.tgz",
"integrity": "sha512-VfVLISHypS7ayIHvhacOESOTib4Sm4mAhnsgR8fzQdGp89YoBwMqvGmqENjtYehUQzgclT+7NafpEXkK/MHKwA=="
},
"node_modules/types-pkg-json": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/types-pkg-json/-/types-pkg-json-1.2.1.tgz",
"integrity": "sha512-Wj75lCkPwfj1BhmaJxMPpTQj9YGpihjs3WICigt1IjTAswr7zPXP0iJYPZjU0Rw/IriODhMJjAImkCIxt9KeuQ==",
"dependencies": {
"types-eslintrc": "^1.0.3",
"types-json": "^1.2.2"
}
},
"node_modules/typescript": { "node_modules/typescript": {
"version": "5.1.6", "version": "5.1.6",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz",
@@ -3771,6 +3690,7 @@
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
"dev": true,
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },

@@ -2,28 +2,58 @@
   "name": "dotenv-tool",
   "version": "1.0.0",
   "description": "Tool to read and update .env files",
-  "main": "dist/index.js",
-  "bin": {
-    "dotenv-tool": "./dist/index.js"
+  "repository": {
+    "type": "git",
+    "url": "https://gitea.dzienia.pl/shared/dotenv-tool.git"
   },
+  "license": "MIT",
+  "author": "Dominik Dzienia <dominik.dzienia@gmail.com>",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/src/index.d.ts",
+        "default": "./dist/cjs/src/index.js"
+      }
+    }
+  },
+  "main": "./dist/cjs/src/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/mjs/index.d.ts",
+  "bin": "./dist/cjs/src/bin.js",
+  "files": [
+    "dist"
+  ],
   "scripts": {
-    "build": "npx tsc",
+    "coverage": "jest --coverage",
+    "help": "npx . --help",
+    "preprepare": "rm -rf dist",
+    "prepare": "npx tsc -p tsconfig.json && npx tsc -p tsconfig-esm.json",
+    "postprepare": "node update-versions.js",
+    "prepublishOnly": "git push origin --follow-tags",
     "test": "jest",
-    "coverage": "jest --coverage"
+    "preversion": "npm test",
+    "postversion": "npm publish"
+  },
+  "dependencies": {
+    "commander": "^11.0.0",
+    "parsimmon": "^1.18.1"
   },
-  "author": "Dominik Dzienia <dominik.dzienia@gmail.com>",
-  "license": "MIT",
   "devDependencies": {
     "@types/jest": "^29.5.3",
+    "@types/mock-fs": "^4.13.1",
     "@types/node": "^20.4.1",
     "@types/parsimmon": "^1.10.6",
     "jest": "^29.6.1",
+    "jest-mock-process": "^2.0.0",
+    "mock-fs": "^5.2.0",
     "ts-jest": "^29.1.1",
     "typescript": "^5.1.6"
   },
-  "dependencies": {
-    "@bconnorwhite/module": "^2.0.2",
-    "commander": "^11.0.0",
-    "parsimmon": "^1.18.1"
+  "engines": {
+    "node": ">=14"
   }
 }

@@ -0,0 +1,497 @@
import { mockProcessExit, mockProcessStderr, mockProcessStdout } from 'jest-mock-process';
import { makeProgram } from './binlib';
import { Command } from 'commander';
import mock from 'mock-fs';
import fs from 'fs';
let mockStdout: jest.SpyInstance;
let mockStderr: jest.SpyInstance;
let mockExit: jest.SpyInstance;
let program: Command;
let stdinContents = '';
function getContents() {
return stdinContents;
}
jest.mock('./binutils.ts', () => ({
stdinToString: jest.fn(() => getContents()),
}));
beforeEach(() => {
mockStdout = mockProcessStdout();
mockStderr = mockProcessStderr();
program = makeProgram();
program.exitOverride();
mockExit = mockProcessExit();
mock({
'.env': 'ALA=makota',
'ugly.env': '########\n\n SOME = value \n\n\n ANOTHER= "value with space"',
'first.env': 'SERVER_PORT=80\nSERVER_HOST=localhost',
'second.env': 'SERVER_PORT=8080\nSERVER_PASSWORD=secret\nCLIENT_HOST=192.168.4.42',
'third.env': 'CLIENT_PORT=3000',
'broken.env': 'CLI ENT_PORT=3000',
'read-only.env': mock.file({
content: 'READ=only',
mode: 0o444,
}),
});
stdinContents = '';
});
afterEach(() => {
mockStdout.mockRestore();
mockStderr.mockRestore();
mockExit.mockRestore();
mock.restore();
});
describe('Standard utils', () => {
it('outputs help', () => {
expect(() => {
program.parse(['node', 'test', '--help']);
}).toThrow();
expect(mockStdout).toHaveBeenCalledWith(expect.stringContaining('Usage: dotenv-tool [options] [command] [paramsToSet...]'));
});
it('outputs version', () => {
expect(() => {
program.parse(['node', 'test', '-v']);
}).toThrow(
expect.objectContaining({
message: expect.stringMatching(/[0-9]+\.[0-9]+\.[0-9]+/),
}),
);
});
});
describe('Read command', () => {
it('reads from empty stdin', () => {
stdinContents = '';
program.parse(['node', 'test', 'get', 'TEST']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(1);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Variable TEST not found in stdin/));
});
it('reads from invalid stdin', () => {
stdinContents = 'junks';
program.parse(['node', 'test', 'get', 'TEST']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Parsing stdin failed/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
});
it('reads from correct stdin', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', 'get', 'TEST']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('works');
});
it('writes result to file', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', 'get', '-o', 'test.txt', 'TEST']);
expect(mockExit).toBeCalled();
expect(fs.readFileSync('test.txt').toString()).toEqual('works');
});
it('reads from correct file', () => {
program.parse(['node', 'test', 'get', '-f', '.env', 'ALA']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('makota');
});
});
describe('Update command', () => {
it('creates empty output without params or stdin', () => {
stdinContents = '';
program.parse(['node', 'test']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('\n');
});
it('uses stdin input', () => {
stdinContents = 'VARIABLE=VALUE';
program.parse(['node', 'test']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('VARIABLE=VALUE\n');
});
it('appends empty stdin', () => {
stdinContents = '';
program.parse(['node', 'test', 'ANOTHER=ok']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('ANOTHER=ok\n');
});
it('reads from invalid stdin', () => {
stdinContents = 'junks';
program.parse(['node', 'test']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot parse source stdin/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
});
it('updates correct stdin', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', 'TEST=new value']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('TEST="new value"\n');
});
it('appends correct stdin', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', 'ANOTHER=ok']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('TEST=works\nANOTHER=ok\n');
});
it('writes result to file', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', '-o', 'test.txt', 'TEST=new']);
expect(mockExit).toBeCalled();
expect(fs.readFileSync('test.txt').toString()).toEqual('TEST=new\n');
});
it('writes result to read-only file', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', '-o', 'read-only.env', 'TEST=new']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed - other error/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/EACCES/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/read-only.env/));
});
it('reads from correct file', () => {
program.parse(['node', 'test', '-i', '.env', '--', 'ALA=ma psa']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('ALA="ma psa"\n');
});
it('beautify input file', () => {
program.parse(['node', 'test', '-b', '-i', 'ugly.env']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('########\n\nSOME=value\n\nANOTHER="value with space"\n');
});
it('reading non-existing file', () => {
program.parse(['node', 'test', '-b', '-i', 'i-am-not-here.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot read file: i-am-not-here.env/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/ENOENT/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/i-am-not-here.env/));
});
it('modify input file', () => {
fs.writeFileSync('inplace.env', '########\n\n SOME = value \n\n\n ANOTHER= "value with space"'),
program.parse(['node', 'test', '-b', '-m', '-i', 'inplace.env']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledTimes(0);
expect(fs.readFileSync('inplace.env').toString()).toEqual('########\n\nSOME=value\n\nANOTHER="value with space"\n');
});
it('do not modify input file if invalid', () => {
const data = '########\n\n IN VALID = value \n\n\n ANOTHER= "value with space"';
fs.writeFileSync('invalid.env', data), program.parse(['node', 'test', '-b', '-m', '-i', 'invalid.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStdout).toHaveBeenCalledTimes(0);
expect(fs.readFileSync('invalid.env').toString()).toEqual(data);
expect(mockStderr).toBeCalledTimes(4);
// normally, exit will finish
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot parse source file invalid.env/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
expect(mockStderr).toHaveBeenNthCalledWith(3, expect.stringMatching(/Overwriting invalid source file not possible: invalid.env/));
expect(mockStderr).toHaveBeenNthCalledWith(4, expect.stringMatching(/PARSING FAILED/));
});
it('merge files', () => {
program.parse(['node', 'test', '-i', 'first.env', 'second.env', 'third.env']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('SERVER_PORT=8080\nSERVER_HOST=localhost\nSERVER_PASSWORD=secret\nCLIENT_HOST=192.168.4.42\nCLIENT_PORT=3000\n');
});
it('merge files with smart append', () => {
program.parse(['node', 'test', '-p', '-i', 'first.env', 'second.env', 'third.env']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('CLIENT_HOST=192.168.4.42\nCLIENT_PORT=3000\nSERVER_PASSWORD=secret\nSERVER_PORT=8080\nSERVER_HOST=localhost\n');
});
it('merge files and set from parameter', () => {
program.parse(['node', 'test', '-i', 'first.env', 'second.env', 'third.env', '--', 'SERVER_PASSWORD=updated value']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith(
'SERVER_PORT=8080\nSERVER_HOST=localhost\nSERVER_PASSWORD="updated value"\nCLIENT_HOST=192.168.4.42\nCLIENT_PORT=3000\n',
);
});
it('merge files and set from parameter and invalid parameter', () => {
program.parse(['node', 'test', '-i', 'first.env', 'second.env', 'third.env', '--', 'SERVER_PASSWORD=updated value', 'INVA LID', 'LAST_ARGUMENT=correct']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot parse params passed/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/Invalid argument/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/INVA LID/));
});
it('merge files and set from parameter and invalid parameter', () => {
program.parse([
'node',
'test',
'-i',
'first.env',
'second.env',
'third.env',
'--',
'SERVER_PASSWORD=updated value',
'INVA-LID=wrong',
'LAST_ARGUMENT=correct',
]);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, invalid param/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/INVA-LID/));
});
it('merge files with invalid file name', () => {
program.parse(['node', 'test', '-i', 'first.env', 'invalid.env', 'third.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot read file: invalid.env/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/ENOENT/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/invalid.env/));
});
it('merge files with broken file contents', () => {
program.parse(['node', 'test', '-i', 'first.env', 'second.env', 'broken.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Updating failed, cannot parse file: broken.env/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
});
});
describe('Update command resiliency', () => {
it('reads from invalid stdin', () => {
stdinContents = 'junks';
program.parse(['node', 'test', '-r']);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('\n');
});
it('reading non-existing file', () => {
program.parse(['node', 'test', '-r', '-b', '-i', 'i-am-not-here.env']);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('\n');
});
it('writes result to read-only file', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', '-r', '-o', 'read-only.env', 'TEST=new']);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
});
it('do not modify input file if invalid', () => {
const data = '########\n\n IN VALID = value \n\n\n ANOTHER= "value with space"';
fs.writeFileSync('invalid.env', data), program.parse(['node', 'test', '-r', '-b', '-m', '-i', 'invalid.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStdout).toHaveBeenCalledTimes(0);
expect(fs.readFileSync('invalid.env').toString()).toEqual(data);
expect(mockStderr).toBeCalledTimes(2);
expect(mockStderr).toHaveBeenNthCalledWith(1, expect.stringMatching(/Overwriting invalid source file not possible: invalid.env/));
expect(mockStderr).toHaveBeenNthCalledWith(2, expect.stringMatching(/PARSING FAILED/));
});
it('merge files with invalid file name', () => {
program.parse(['node', 'test', '-r', '-i', 'first.env', 'invalid.env', 'third.env']);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('SERVER_PORT=80\nSERVER_HOST=localhost\nCLIENT_PORT=3000\n');
});
it('merge files with broken file contents', () => {
program.parse(['node', 'test', '-r', '-i', 'first.env', 'second.env', 'broken.env']);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('SERVER_PORT=8080\nSERVER_HOST=localhost\nSERVER_PASSWORD=secret\nCLIENT_HOST=192.168.4.42\n');
});
it('merge files and set from parameter and not parsable parameter', () => {
program.parse([
'node',
'test',
'-r',
'-i',
'first.env',
'second.env',
'third.env',
'--',
'SERVER_PASSWORD=updated value',
'INVA-LID=par',
'LAST_ARGUMENT=correct',
]);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith(
'SERVER_PORT=8080\nSERVER_HOST=localhost\nSERVER_PASSWORD="updated value"\nCLIENT_HOST=192.168.4.42\nCLIENT_PORT=3000\nLAST_ARGUMENT=correct\n',
);
});
it('merge files and set from parameter and not parsable parameter', () => {
program.parse([
'node',
'test',
'-r',
'-i',
'first.env',
'second.env',
'third.env',
'--',
'SERVER_PASSWORD=updated value',
'INVA LID',
'LAST_ARGUMENT=correct',
]);
expect(mockExit).toBeCalled();
expect(mockStderr).toHaveBeenCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith(
'SERVER_PORT=8080\nSERVER_HOST=localhost\nSERVER_PASSWORD="updated value"\nCLIENT_HOST=192.168.4.42\nCLIENT_PORT=3000\nLAST_ARGUMENT=correct\n',
);
});
});
describe('Silent flag', () => {
it('reads from empty stdin', () => {
stdinContents = '';
program.parse(['node', 'test', '-s', 'get', 'TEST']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('reads from invalid stdin', () => {
stdinContents = 'junks';
program.parse(['node', 'test', '-s', 'get', 'TEST']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('reads from invalid stdin', () => {
stdinContents = 'junks';
program.parse(['node', 'test', '-s']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('writes result to read-only file', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', '-s', '-o', 'read-only.env', 'TEST=new']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('reading non-existing file', () => {
program.parse(['node', 'test', '-s', '-b', '-i', 'i-am-not-here.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('do not modify input file if invalid', () => {
const data = '########\n\n IN VALID = value \n\n\n ANOTHER= "value with space"';
fs.writeFileSync('invalid.env', data), program.parse(['node', 'test', '-s', '-b', '-m', '-i', 'invalid.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('merge files and set from parameter and invalid parameter', () => {
program.parse([
'node',
'test',
'-s',
'-i',
'first.env',
'second.env',
'third.env',
'--',
'SERVER_PASSWORD=updated value',
'INVA LID',
'LAST_ARGUMENT=correct',
]);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('merge files and set from parameter and invalid parameter', () => {
program.parse([
'node',
'test',
'-s',
'-i',
'first.env',
'second.env',
'third.env',
'--',
'SERVER_PASSWORD=updated value',
'INVA-LID=wrong',
'LAST_ARGUMENT=correct',
]);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('merge files with invalid file name', () => {
program.parse(['node', 'test', '-s', '-i', 'first.env', 'invalid.env', 'third.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('merge files with broken file contents', () => {
program.parse(['node', 'test', '-s', '-i', 'first.env', 'second.env', 'broken.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
it('do not modify input file if invalid', () => {
const data = '########\n\n IN VALID = value \n\n\n ANOTHER= "value with space"';
fs.writeFileSync('invalid.env', data), program.parse(['node', 'test', '-s', '-r', '-b', '-m', '-i', 'invalid.env']);
expect(mockExit).toHaveBeenCalledWith(1);
expect(mockStderr).toBeCalledTimes(0);
});
});
describe('Update command without appending', () => {
it('does not append to empty stdin', () => {
stdinContents = '';
program.parse(['node', 'test', '-u', 'ANOTHER=ok']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('\n');
});
it('does not append to correct stdin', () => {
stdinContents = 'TEST=works';
program.parse(['node', 'test', '-u', 'ANOTHER=ok']);
expect(mockExit).toBeCalled();
expect(mockStdout).toHaveBeenCalledWith('TEST=works\n');
});
it('merge files', () => {
program.parse(['node', 'test', '-u', '-i', 'first.env', 'second.env', 'third.env']);
expect(mockExit).toBeCalled();
expect(mockStderr).toBeCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('SERVER_PORT=8080\nSERVER_HOST=localhost\n');
});
it('merge files and set from parameter and not append new', () => {
program.parse(['node', 'test', '-u', '-i', 'first.env', 'second.env', 'third.env', '--', 'SERVER_PORT=443', 'LAST_ARGUMENT=correct']);
expect(mockExit).toBeCalled();
expect(mockStderr).toBeCalledTimes(0);
expect(mockStdout).toHaveBeenCalledWith('SERVER_PORT=443\nSERVER_HOST=localhost\n');
});
});

@@ -0,0 +1,6 @@
#! /usr/bin/env node
import { makeProgram } from "./binlib";
const program = makeProgram();
program.parse(process.argv);

@@ -0,0 +1,28 @@
import { normalizeParams } from './binlib';
describe('Normalizing input variables', () => {
it('works on empty param list', () => {
expect(normalizeParams([])).toEqual([]);
});
it('handles standard cases', () => {
expect(normalizeParams(['aaa=bbb', 'ccc=ddd'])).toEqual(['aaa=bbb', 'ccc=ddd']);
expect(normalizeParams(['aaa =bbb', 'ccc= ddd', 'eee = fff'])).toEqual(['aaa=bbb', 'ccc=" ddd"', 'eee=" fff"']);
});
it('joins', () => {
expect(normalizeParams(['aaa', '=', 'bbb', 'ccc', '=', 'ddd'])).toEqual(['aaa=bbb', 'ccc=ddd']);
});
it('does not allow empty props when escaped', () => {
expect(() => normalizeParams(['aaa', '='])).toThrowError(/Invalid argument/);
});
it('allow empty props when properly formatted', () => {
expect(normalizeParams(['aaa='])).toEqual(['aaa=']);
});
it('escapes strings with spaces or double quotes', () => {
expect(normalizeParams(['aaa=ala ma kota', 'ccc="ddd'])).toEqual(['aaa=\"ala ma kota\"', 'ccc=\"\\\"ddd\"']);
});
});

@@ -0,0 +1,214 @@
import { Command } from 'commander';
import fs from 'fs';
import { version } from '../package.json';
import { stdinToString } from './binutils';
import { parseMultiLine, stringifyTokens } from './parser';
import { Config, ModifyMode, TokenType, VariableToken } from './types';
import { update } from './manipulation';
export function normalizeParamVar(name: string, value: string): string {
let fixValue = value;
if (value.match(/[\s"']/)) {
fixValue = `"${value.replace(/\"/g, '\\"')}"`;
}
return `${name.trim()}=${fixValue}`;
}
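// Re-joins arguments that the shell split around '=' (e.g. ['KEY', '=', 'value']), quotes values that need it, and throws on arguments without '=' unless running in resilient mode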
export function normalizeParams(rawParams: string[], resilient = false): string[] {
const paramsFixed: string[] = [];
let buffer: string[] = [];
const bufferFlush = () => {
const joined = buffer.join('');
const bPos = joined.indexOf('=');
if (bPos > -1) {
paramsFixed.push(normalizeParamVar(joined.substring(0, bPos), joined.substring(bPos + 1)));
} else {
if (!resilient) {
throw new Error(`Invalid argument: ${joined}`);
}
}
buffer = [];
};
rawParams.forEach((ps: string, idx) => {
const eqPos = ps.indexOf('=');
if ((eqPos >= 0 && ps.length > 1) || idx == rawParams.length - 1) {
if (eqPos == -1) {
buffer.push(ps);
}
if (buffer.length > 0) {
bufferFlush();
}
if (eqPos > -1) {
paramsFixed.push(normalizeParamVar(ps.substring(0, eqPos), ps.substring(eqPos + 1)));
}
} else {
buffer.push(ps);
if (buffer.length >= 2 && buffer[buffer.length - 2] == '=') {
bufferFlush();
}
}
});
return paramsFixed;
}
function makeReadCommand() {
const getCmd = new Command('get');
getCmd
.description('Returns given variable from env file (if specified)')
.option('-f, --file <filePath>', 'Input file to parse (if not given, stdio is used)')
.argument('<key>', 'env variable name a.k.a. key')
.action((key, options, cmd) => {
const globOpts = cmd.optsWithGlobals();
const data = options.file ? fs.readFileSync(options.file).toString() : stdinToString();
const sourceName = options.file ? `file ${options.file}` : `stdin`;
try {
const tokens = parseMultiLine(data);
const found = tokens.filter((t) => t.token === TokenType.VARIABLE && (t as VariableToken).name == key);
if (found.length > 0) {
if (globOpts.outputFile) {
fs.writeFileSync(globOpts.outputFile, found[0].value);
} else {
process.stdout.write(found[0].value);
process.stdout.write('\n');
}
process.exit();
} else {
if (!globOpts.silent) process.stderr.write(`Variable ${key} not found in ${sourceName}\n`);
process.exit(1);
}
} catch (e: any) {
if (!globOpts.silent) process.stderr.write(`Parsing ${sourceName} failed\n`);
if (!globOpts.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
}
});
return getCmd;
}
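// Builds the top-level CLI; the default action merges the input files (or stdin), applies the key=value parameters, and writes the result to stdout, the output file, or back to the first input file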
export function makeProgram() {
const program = new Command();
program
.name('dotenv-tool')
.description('Tool to read and update .env files')
.version(version, '-v, --version')
.addCommand(makeReadCommand())
.argument('[paramsToSet...]', 'space separated list of additional envs to set, in format key=value', '')
.option('-i, --files <filePaths...>', 'Input file(s)')
.option('-o, --outputFile <filePath>', 'Output file')
.option('-m, --modify', 'Modify first input file')
.option('-b, --beautify', 'Beautifies resulting env file')
.option('-u, --onlyUpdate', 'Only updates existing values, without appending new values')
.option('-p, --smartAppend', 'Smart appends variables depending on their names')
.option('-r, --resilient', 'Ignore files that cannot be read during update')
.option('-s, --silent', 'Mute all messages and errors')
.action((paramsToSet, options, cmd) => {
try {
const inputData: string[] = options.files
? options.files.map((fname: string, index: number) => {
try {
return fs.readFileSync(fname).toString();
} catch (e: any) {
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed, cannot read file: ${fname}\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
} else {
return '';
}
}
})
: [stdinToString()];
const sourceName = options.files ? `file ${options.files[0]}` : `stdin`;
const config: Config = {};
const remainingFiles = (options.files && options.files.length) > 1 ? options.files.slice(1) : [];
let validSource = false;
let sourceParsingError = '';
config.beautify = options.beautify || false;
config.modifyMode = options.onlyUpdate ? ModifyMode.NO_APPEND : options.smartAppend ? ModifyMode.SMART_APPEND : ModifyMode.APPEND;
const baseData = inputData.shift() || '';
let tokens = [];
try {
tokens.push(...parseMultiLine(baseData));
validSource = true;
} catch (e: any) {
sourceParsingError = e;
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed, cannot parse source ${sourceName}\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
}
}
let updates = [];
while (inputData.length > 0) {
const parsedFile = remainingFiles.shift();
try {
const updateTokens = parseMultiLine(inputData.shift() || '');
updates.push(...updateTokens.filter((t) => t.token === TokenType.VARIABLE).map((t) => t as VariableToken));
} catch (e: any) {
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed, cannot parse file: ${parsedFile}\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
}
}
}
if (paramsToSet && paramsToSet.length > 0) {
try {
const paramsFixed = normalizeParams(paramsToSet, options.resilient);
paramsFixed.forEach((param) => {
try {
const updateTokens = parseMultiLine(param);
updates.push(...updateTokens.filter((t) => t.token === TokenType.VARIABLE).map((t) => t as VariableToken));
} catch (e: any) {
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed, invalid param\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
}
}
});
} catch (e: any) {
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed, cannot parse params passed\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
process.exit(1);
}
}
}
tokens = update(tokens, updates, config);
const outputStr = stringifyTokens(tokens, config);
if (options.outputFile) {
fs.writeFileSync(options.outputFile, outputStr);
} else if (options.modify && options.files.length > 0) {
if (validSource) {
fs.writeFileSync(options.files[0], outputStr);
} else {
if (!options.silent) process.stderr.write(`Overwriting invalid source file not possible: ${options.files[0]}\n`);
if (!options.silent) process.stderr.write(sourceParsingError.toString() + '\n');
process.exit(1);
}
} else {
process.stdout.write(outputStr);
}
process.exit();
} catch (e: any) {
if (!options.resilient) {
if (!options.silent) process.stderr.write(`Updating failed - other error\n`);
if (!options.silent) process.stderr.write(e.toString() + '\n');
}
process.exit(1);
}
});
return program;
}

@@ -0,0 +1,42 @@
import fs from 'fs';
const BUFSIZE = 256;
const buf = Buffer.alloc(BUFSIZE);
let bytesRead;
let stdin = '';
export function stdinToString(): string {
do {
// Loop as long as stdin input is available.
bytesRead = 0;
try {
bytesRead = fs.readSync(process.stdin.fd, buf, 0, BUFSIZE, null);
} catch (e: any) {
if (e.code === 'EAGAIN') {
// 'resource temporarily unavailable'
// Happens on OS X 10.8.3 (not Windows 7!), if there's no
// stdin input - typically when invoking a script without any
// input (for interactive stdin input).
// If you were to just continue, you'd create a tight loop.
// throw 'ERROR: interactive stdin input not supported.';
break;
} else if (e.code === 'EOF') {
// Happens on Windows 7, but not OS X 10.8.3:
// simply signals the end of *piped* stdin input.
break;
}
throw e; // unexpected exception
}
if (bytesRead === 0) {
// No more stdin input available.
// OS X 10.8.3: regardless of input method, this is how the end
// of input is signaled.
// Windows 7: this is how the end of input is signaled for
// *interactive* stdin input.
break;
}
// Process the chunk read.
stdin += buf.toString(undefined, 0, bytesRead);
} while (bytesRead > 0);
return stdin;
}

@@ -1,37 +1,3 @@
-#! /usr/bin/env node
-import commander, { program } from 'commander';
-import { getVersionSync } from '@bconnorwhite/module';
-function makeReadCommand() {
-  const getCmd = new commander.Command('get');
-  getCmd
-    .description('Returns given variable from env file (if specified)')
-    .option('-f, --file <filePath>', 'Input file to parse (if not given, stdio is used)')
-    .argument('<key>', 'env variable name a.k.a. key')
-    .action((key) => {
-      const options = program.opts();
-      console.log(options);
-      console.log('heat jug', key);
-    });
-  return getCmd;
-}
-program
-  .name('dotenv-tool')
-  .description('Tool to read and update .env files')
-  .version(getVersionSync(__dirname) || '1.0.0', '-v, --version')
-  .argument('[paramsToSet]', 'space separated list of additional envs to set, in format key=value', '')
-  .option('-f, --files <filePaths...>', 'Input file(s)')
-  .option('-o, --outputFile <filePath>', 'Output file')
-  .option('-m, --modify', 'Modify first input file')
-  .action((paramsToSet) => {
-    console.log('got', paramsToSet);
-    const options = program.opts();
-    console.log(options);
-  })
-  .addCommand(makeReadCommand())
-  .parse(process.argv);
+export * from './types'
+export * from './parser'
+export { beautify, update } from './manipulation'

@@ -1,10 +1,10 @@
-import { update, fix_token_list } from './manipulation';
-import { stringifyTokens } from './parser';
-import { ModifyMode } from './types';
+import { update, fixTokenListNewLines, beautify } from './manipulation';
+import { parseMultiLine, stringifyTokens } from './parser';
+import { ModifyMode, Config } from './types';
 describe('Updating tokens', () => {
   it('Creates empty file', () => {
-    expect(stringifyTokens(update([], [], { normalize: true }))).toEqual('\n');
+    expect(stringifyTokens(update([], [], { beautify: true }))).toEqual('\n');
     expect(stringifyTokens(update([], [], { enforceNewLineEnd: false }))).toEqual('');
   });
   it('appends at update with APPEND mode', () => {
@@ -40,7 +40,7 @@ describe('Updating tokens', () => {
         ['ALAMA', 'kota'],
         ['KOTMA', 'alę'],
       ],
-      { modifyMode: ModifyMode.EXISTING_ONLY },
+      { modifyMode: ModifyMode.NO_APPEND },
     ),
   ),
 ).toEqual('\n');
@@ -232,7 +232,7 @@ describe('Updating tokens', () => {
 describe('Fixing token list', () => {
   it('merges and interleave tokens with new lines', () => {
     expect(
-      fix_token_list([
+      fixTokenListNewLines([
         ctoken('###########'),
         ctoken('# Server'),
         ctoken('###########'),
@@ -279,3 +279,52 @@ describe('Fixing token list', () => {
    ]);
  });
});
describe('Beautifying output', () => {
it('appends single new line', () => {
expect(beautify([], { beautify: true })).toEqual([nltoken()]);
});
it('beautify output', () => {
const cfg: Config = { beautify: true };
expect(
stringifyTokens(
beautify(
parseMultiLine(
'\n\n\r\n #-------------------------\n' +
'# Server \n' +
' #-------------------------\n\n\n\n' +
'SERVER_HOST=127.0.0.1\nSERVER_OUTPUT= /dev/null # simply ignore it \nSERVER_PORT=80\nSERVER_LOGIN=root\n' +
'#-------------------------\n' +
'# Client\n' +
'#-------------------------\n\r' +
'CLIENT_ACCESS=limited \nCLIENT_LOGIN =john\nCLIENT_X_AXIS=12\nCLIENT_Z_AXIS=100\n' +
'#-------------------------\n\r' +
'AUTO_APPEND=true\n AUTO_RUN = true\nAUTO_CLEAN=false\nAUTO_STOP=true\n AUTO_ZERO = 000 \n' +
'#-------------------------\n' +
'# Other\n' +
'#-------------------------\n' +
'QUALITY=90%\n\n\r\n\nZOOM=100%\n\n\n',
),
cfg,
),
cfg,
),
).toEqual(
'#-------------------------\n' +
'# Server\n' +
'#-------------------------\n\n' +
'SERVER_HOST=127.0.0.1\nSERVER_OUTPUT=/dev/null # simply ignore it\nSERVER_PORT=80\nSERVER_LOGIN=root\n\n' +
'#-------------------------\n' +
'# Client\n' +
'#-------------------------\n\n' +
'CLIENT_ACCESS=limited\nCLIENT_LOGIN=john\nCLIENT_X_AXIS=12\nCLIENT_Z_AXIS=100\n\n' +
'#-------------------------\n\n' +
'AUTO_APPEND=true\nAUTO_RUN=true\nAUTO_CLEAN=false\nAUTO_STOP=true\nAUTO_ZERO=000\n\n' +
'#-------------------------\n' +
'# Other\n' +
'#-------------------------\n\n' +
'QUALITY=90%\n\nZOOM=100%\n',
);
});
});

@@ -1,7 +1,7 @@
-import { ModifyMode, Variable, VariableToken, TokenType, Token, StringifyConfig, VariableToUpdate } from './types';
+import { ModifyMode, Variable, VariableToken, TokenType, Token, Config, VariableToUpdate } from './types';
 import { compareWeighted, getDefaultConfig } from './utils';
-export function VariableTokenFrom(variable: Variable | VariableToken): VariableToken {
+function variableTokenFrom(variable: Variable | VariableToken): VariableToken {
   return {
     beginning: (variable as VariableToken).beginning || '',
     comment: (variable as Variable).comment || '',
@@ -14,7 +14,7 @@ export function VariableTokenFrom(variable: Variable | VariableToken): VariableT
   };
 }
-export function fix_token_list(tokens: Token[]): Token[] {
+export function fixTokenListNewLines(tokens: Token[]): Token[] {
   let fixed: Token[] = [];
   let current = tokens.shift();
@@ -40,10 +40,10 @@ export function fix_token_list(tokens: Token[]): Token[] {
   return fixed;
 }
-export function normalize(tokens: Token[], config?: StringifyConfig): Token[] {
+export function beautify(tokens: Token[], config?: Config): Token[] {
   const cfg = getDefaultConfig(config);
-  if (cfg.normalize) {
-    tokens = tokens.filter((t) => t.token === TokenType.WHITESPACE);
+  if (cfg.beautify) {
+    tokens = tokens.filter((t) => t.token !== TokenType.WHITESPACE);
     tokens = tokens.map((t) => {
       switch (t.token) {
         case TokenType.COMMENT:
@@ -53,16 +53,15 @@ export function normalize(tokens: Token[], config?: StringifyConfig): Token[] {
           (t as VariableToken).beginning = '';
           (t as VariableToken).ending = '';
           (t as VariableToken).equals = '=';
-          (t as VariableToken).comment = ' ' + (t as VariableToken).comment.trim();
+          (t as VariableToken).comment = ((t as VariableToken).comment.length > 0) ? ' ' + (t as VariableToken).comment.trim() : '';
           return t;
         case TokenType.NEWLINE:
           t.value = t.value.replace(/\r/g, '');
-          return t;
         default:
           return t;
       }
     });
-    tokens = fix_token_list(tokens);
+    tokens = fixTokenListNewLines(tokens);
     while (tokens.length > 0 && tokens[0].token === TokenType.NEWLINE) {
       tokens.shift();
     }
@@ -89,15 +88,19 @@ export function normalize(tokens: Token[], config?: StringifyConfig): Token[] {
     if (tokens.length == 0 || tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
       tokens.push({ token: TokenType.NEWLINE, value: '\n' });
     }
+    if (tokens.length > 0 && tokens[tokens.length - 1].token === TokenType.NEWLINE) {
+      tokens[tokens.length -1].value = '\n';
+    }
   }
   return tokens;
 }
-export function update(tokens: Token[], updateWith: VariableToUpdate[], config?: StringifyConfig): Token[] {
+export function update(tokens: Token[], updateWith: VariableToUpdate[], config?: Config): Token[] {
   const cfg = getDefaultConfig(config);
-  tokens = fix_token_list(tokens);
+  tokens = fixTokenListNewLines(tokens);
   updateWith.forEach((u) => {
     const updateVar: Variable | VariableToken = Array.isArray(u) ? { name: u[0], value: u[1] } : u;
@@ -143,9 +146,9 @@ export function update(tokens: Token[], updateWith: VariableToUpdate[], config?:
     });
     if (insertBeforePos > -1 || insertAfterPos == -1) {
-      tokens.splice(insertBeforePos > -1 ? insertBeforePos : 0, 0, VariableTokenFrom(updateVar), { token: TokenType.NEWLINE, value: '\n' });
+      tokens.splice(insertBeforePos > -1 ? insertBeforePos : 0, 0, variableTokenFrom(updateVar), { token: TokenType.NEWLINE, value: '\n' });
     } else {
-      tokens.splice(insertAfterPos + 1, 0, { token: TokenType.NEWLINE, value: '\n' }, VariableTokenFrom(updateVar));
+      tokens.splice(insertAfterPos + 1, 0, { token: TokenType.NEWLINE, value: '\n' }, variableTokenFrom(updateVar));
     }
     break;
@@ -153,18 +156,18 @@
     if (tokens.length > 0 && tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
       tokens.push({ token: TokenType.NEWLINE, value: '\n' });
     }
-    tokens.push(VariableTokenFrom(updateVar));
+    tokens.push(variableTokenFrom(updateVar));
     tokens.push({ token: TokenType.NEWLINE, value: '\n' });
     break;
       }
     }
   });
-  if (cfg.normalize) {
-    tokens = normalize(tokens, cfg);
+  if (cfg.beautify) {
+    tokens = beautify(tokens, cfg);
   }
-  if (cfg.normalize || cfg.enforceNewLineEnd) {
+  if (cfg.beautify || cfg.enforceNewLineEnd) {
     // end each file with new line
     if (tokens.length == 0 || tokens[tokens.length - 1].token !== TokenType.NEWLINE) {
       tokens.push({ token: TokenType.NEWLINE, value: '\n' });

@@ -9,7 +9,7 @@ describe('MultiLine format parser', () => {
     expect(parseMultiLine('VARNAME=value \t ')).toEqual([vtoken('VARNAME', 'value', '', '', '', ' \t ')]);
     expect(parseMultiLine('VARNAME=A:\\DYNA')).toEqual([vtoken('VARNAME', 'A:\\DYNA')]);
     expect(parseMultiLine('VARNAME=quote\'is"ok')).toEqual([vtoken('VARNAME', 'quote\'is"ok')]);
-    expect(parseMultiLine('VARNAME=value not escaped')).toMatch(/PARSING FAILED/);
+    expect(() => parseMultiLine('VARNAME=value not escaped')).toThrow('PARSING FAILED');
   });
   it('prop have defined names', () => {
@@ -19,12 +19,12 @@
     expect(parseMultiLine('_FINE=value')).toEqual([vtoken('_FINE', 'value')]);
     expect(parseMultiLine('A=value')).toEqual([vtoken('A', 'value')]);
     expect(parseMultiLine('x00001=value')).toEqual([vtoken('x00001', 'value')]);
-    expect(parseMultiLine('007=Bond')).toMatch(/PARSING FAILED/);
-    expect(parseMultiLine('kłącze=value')).toMatch(/PARSING FAILED/);
-    expect(parseMultiLine('var$name=value')).toMatch(/PARSING FAILED/);
-    expect(parseMultiLine('not%alowed=value')).toMatch(/PARSING FAILED/);
+    expect(() => parseMultiLine('007=Bond')).toThrow(/PARSING FAILED/);
+    expect(() => parseMultiLine('kłącze=value')).toThrow(/PARSING FAILED/);
+    expect(() => parseMultiLine('var$name=value')).toThrow(/PARSING FAILED/);
+    expect(() => parseMultiLine('not%alowed=value')).toThrow(/PARSING FAILED/);
     expect(parseMultiLine('#thiswontbevar=value')).toEqual([ctoken('#thiswontbevar=value')]);
-    expect(parseMultiLine('and#thisisnoteven=comment')).toMatch(/PARSING FAILED/);
+    expect(() => parseMultiLine('and#thisisnoteven=comment')).toThrow(/PARSING FAILED/);
   });
   it('parse double quoted prop', () => {
@@ -34,7 +34,7 @@
     expect(parseMultiLine('VARNAME="value with spaces"')).toEqual([vtoken('VARNAME', 'value with spaces', '"')]);
     expect(parseMultiLine('VARNAME=" value with spaces"')).toEqual([vtoken('VARNAME', ' value with spaces', '"')]);
     expect(parseMultiLine('VARNAME="value with spaces "')).toEqual([vtoken('VARNAME', 'value with spaces ', '"')]);
-    expect(parseMultiLine('VARNAME="not closed')).toMatch(/PARSING FAILED/);
+    expect(() => parseMultiLine('VARNAME="not closed')).toThrow(/PARSING FAILED/);
   });
   it('escapes in double quot', () => {
@@ -52,7 +52,7 @@
     expect(parseMultiLine(`VARNAME='value with spaces'`)).toEqual([vtoken('VARNAME', 'value with spaces', "'")]);
     expect(parseMultiLine(`VARNAME=' value with spaces'`)).toEqual([vtoken('VARNAME', ' value with spaces', "'")]);
     expect(parseMultiLine(`VARNAME='value with spaces '`)).toEqual([vtoken('VARNAME', 'value with spaces ', "'")]);
-    expect(parseMultiLine(`VARNAME='not closed`)).toMatch(/PARSING FAILED/);
+    expect(() => parseMultiLine(`VARNAME='not closed`)).toThrow(/PARSING FAILED/);
   });
   it('escapes in single quot', () => {
@@ -188,4 +188,3 @@ describe('MultiLine format stringifier', () => {
     );
   });
 });

@@ -1,5 +1,5 @@
 import Parsimmon from 'parsimmon';
-import { Token, StringifyConfig, TokenType, VariableToken } from './types';
+import { Token, Config, TokenType, VariableToken } from './types';
 import { getDefaultConfig } from './utils';
 const varName = Parsimmon.regexp(/[a-zA-Z_]+[a-zA-Z0-9_]*/).desc('environment variable name');
@@ -72,11 +72,11 @@ export function parseMultiLine(input: string): Token[] {
       .flatMap((o) => [{ token: 'newline', value: o.nl_in_front }, { ...o.content }])
       .filter((t) => !((t.token === 'newline' || t.token === 'whitespace') && t.value === ''));
   } catch (err: any) {
-    return err.message;
+    throw err;
   }
 }
-export function stringifyTokens(tokens: Token[], config?: StringifyConfig): string {
+export function stringifyTokens(tokens: Token[], config?: Config): string {
   const cfg = getDefaultConfig(config);
   return tokens
     .map((t) => {

@@ -22,7 +22,7 @@ export type VariableToken = {
 export type Token = SimpleToken | VariableToken;
 export enum ModifyMode {
-  EXISTING_ONLY = 'existing-only',
+  NO_APPEND = 'no-append',
   APPEND = 'append',
   SMART_APPEND = 'smart-append',
 }
@@ -33,8 +33,8 @@ export type Variable = {
   comment?: string;
 };
-export type StringifyConfig = {
-  normalize?: boolean;
+export type Config = {
+  beautify?: boolean;
   enforceNewLineEnd?: boolean;
   modifyMode?: ModifyMode;
 };

@@ -1,8 +1,8 @@
-import { ModifyMode, StringifyConfig } from './types';
+import { ModifyMode, Config } from './types';
 const DEFAULT_CONFIG = { modifyMode: ModifyMode.SMART_APPEND, normalize: false, enforceNewLineEnd: true };
-export function getDefaultConfig(customConfig?: StringifyConfig): StringifyConfig {
+export function getDefaultConfig(customConfig?: Config): Config {
   return { ...DEFAULT_CONFIG, ...(customConfig || {})};
 }

@@ -0,0 +1,18 @@
{
"include": ["./src/*.ts", "./src/tests/utils/**.d.ts"],
"compilerOptions": {
"allowSyntheticDefaultImports": true,
"declaration": true,
"declarationMap": true,
"inlineSources": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"isolatedModules": true,
"moduleResolution": "node",
"resolveJsonModule": true,
"sourceMap": true,
"strict": true,
"target": "es6",
"module": "commonjs"
}
}

@@ -0,0 +1,8 @@
{
"extends": "./tsconfig-base.json",
"exclude": ["src/bin.ts"],
"compilerOptions": {
"module": "esnext",
"outDir": "dist/mjs"
}
}

@@ -1,13 +1,8 @@
 {
+  "extends": "./tsconfig-base.json",
   "compilerOptions": {
-    "rootDir": "src",
-    "outDir": "dist",
-    "strict": true,
-    "target": "es6",
     "module": "commonjs",
-    "sourceMap": true,
-    "esModuleInterop": true,
-    "moduleResolution": "node"
-  },
-  "include": ["./src/*.ts", "./src/tests/utils/**.d.ts"]
+    "outDir": "dist/cjs",
+    "moduleResolution": "Node"
+  }
 }

@@ -0,0 +1,15 @@
const fs = require('fs');
const path = require('path');
const version = require("./package.json").version
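// Stamp each build flavour with a minimal package.json so Node treats dist/cjs as CommonJS and dist/mjs as ES modules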
fs.writeFileSync(`${__dirname}${path.sep}dist${path.sep}cjs${path.sep}package.json`, JSON.stringify({
version: version,
type: 'commonjs'
}, null, ' '));
fs.writeFileSync(`${__dirname}${path.sep}dist${path.sep}mjs${path.sep}package.json`, JSON.stringify({
version: version,
type: 'module'
}, null, ' '));
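// Make the compiled CLI entry point executable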
fs.chmodSync(`${__dirname}${path.sep}dist${path.sep}cjs${path.sep}src${path.sep}bin.js`, 0o755)