Remove optional files
This commit is contained in:
parent 579ccdc29f
commit 15c35c428e
131 changed files with 0 additions and 14812 deletions
node_modules/.bin/errno (generated, vendored): 12 deletions
@@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../errno/cli.js" "$@"
else
  exec node "$basedir/../errno/cli.js" "$@"
fi
node_modules/.bin/errno.cmd (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\errno\cli.js" %*
node_modules/.bin/errno.ps1 (generated, vendored): 28 deletions
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../errno/cli.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../errno/cli.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../errno/cli.js" $args
  } else {
    & "node$exe" "$basedir/../errno/cli.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
node_modules/.bin/image-size (generated, vendored): 12 deletions
@@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../image-size/bin/image-size.js" "$@"
else
  exec node "$basedir/../image-size/bin/image-size.js" "$@"
fi
node_modules/.bin/image-size.cmd (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\image-size\bin\image-size.js" %*
node_modules/.bin/image-size.ps1 (generated, vendored): 28 deletions
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../image-size/bin/image-size.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../image-size/bin/image-size.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../image-size/bin/image-size.js" $args
  } else {
    & "node$exe" "$basedir/../image-size/bin/image-size.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
node_modules/.bin/mime (generated, vendored): 12 deletions
@@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../mime/cli.js" "$@"
else
  exec node "$basedir/../mime/cli.js" "$@"
fi
node_modules/.bin/mime.cmd (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mime\cli.js" %*
node_modules/.bin/mime.ps1 (generated, vendored): 28 deletions
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../mime/cli.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../mime/cli.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../mime/cli.js" $args
  } else {
    & "node$exe" "$basedir/../mime/cli.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
node_modules/.bin/needle (generated, vendored): 12 deletions
@@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../needle/bin/needle" "$@"
else
  exec node "$basedir/../needle/bin/needle" "$@"
fi
node_modules/.bin/needle.cmd (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\needle\bin\needle" %*
node_modules/.bin/needle.ps1 (generated, vendored): 28 deletions
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../needle/bin/needle" $args
  } else {
    & "$basedir/node$exe" "$basedir/../needle/bin/needle" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../needle/bin/needle" $args
  } else {
    & "node$exe" "$basedir/../needle/bin/needle" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
node_modules/.package-lock.json (generated, vendored): 102 deletions
@@ -1432,18 +1432,6 @@
"once": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/errno": {
|
||||
"version": "0.1.8",
|
||||
"resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz",
|
||||
"integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"prr": "~1.0.1"
|
||||
},
|
||||
"bin": {
|
||||
"errno": "cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/error-ex": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
@@ -3445,18 +3433,6 @@
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/image-size": {
|
||||
"version": "0.5.5",
|
||||
"resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz",
|
||||
"integrity": "sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=",
|
||||
"optional": true,
|
||||
"bin": {
|
||||
"image-size": "bin/image-size.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/import-regex": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/import-regex/-/import-regex-1.1.0.tgz",
@@ -4288,28 +4264,6 @@
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/make-dir": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz",
|
||||
"integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"pify": "^4.0.1",
|
||||
"semver": "^5.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/make-dir/node_modules/pify": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz",
|
||||
"integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==",
|
||||
"optional": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/make-error": {
|
||||
"version": "1.3.6",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
@@ -4475,18 +4429,6 @@
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mime": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
|
||||
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
|
||||
"optional": true,
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/mimic-fn": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz",
@@ -4645,38 +4587,6 @@
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/needle": {
|
||||
"version": "2.9.1",
|
||||
"resolved": "https://registry.npmjs.org/needle/-/needle-2.9.1.tgz",
|
||||
"integrity": "sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"debug": "^3.2.6",
|
||||
"iconv-lite": "^0.4.4",
|
||||
"sax": "^1.2.4"
|
||||
},
|
||||
"bin": {
|
||||
"needle": "bin/needle"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 4.4.x"
|
||||
}
|
||||
},
|
||||
"node_modules/needle/node_modules/debug": {
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
|
||||
"integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"ms": "^2.1.1"
|
||||
}
|
||||
},
|
||||
"node_modules/needle/node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/next-tick": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz",
@@ -5348,12 +5258,6 @@
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
|
||||
"integrity": "sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk="
|
||||
},
|
||||
"node_modules/prr": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz",
|
||||
"integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/pseudomap": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz",
@@ -5880,12 +5784,6 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"node_modules/sax": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
|
||||
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "5.7.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
|
||||
|
|
node_modules/errno/.jshintrc (generated, vendored): 59 deletions
@@ -1,59 +0,0 @@
{
|
||||
"predef": [ ]
|
||||
, "bitwise": false
|
||||
, "camelcase": false
|
||||
, "curly": false
|
||||
, "eqeqeq": false
|
||||
, "forin": false
|
||||
, "immed": false
|
||||
, "latedef": false
|
||||
, "noarg": true
|
||||
, "noempty": true
|
||||
, "nonew": true
|
||||
, "plusplus": false
|
||||
, "quotmark": true
|
||||
, "regexp": false
|
||||
, "undef": true
|
||||
, "unused": true
|
||||
, "strict": false
|
||||
, "trailing": true
|
||||
, "maxlen": 120
|
||||
, "asi": true
|
||||
, "boss": true
|
||||
, "debug": true
|
||||
, "eqnull": true
|
||||
, "esnext": true
|
||||
, "evil": true
|
||||
, "expr": true
|
||||
, "funcscope": false
|
||||
, "globalstrict": false
|
||||
, "iterator": false
|
||||
, "lastsemic": true
|
||||
, "laxbreak": true
|
||||
, "laxcomma": true
|
||||
, "loopfunc": true
|
||||
, "multistr": false
|
||||
, "onecase": false
|
||||
, "proto": false
|
||||
, "regexdash": false
|
||||
, "scripturl": true
|
||||
, "smarttabs": false
|
||||
, "shadow": false
|
||||
, "sub": true
|
||||
, "supernew": false
|
||||
, "validthis": true
|
||||
, "browser": true
|
||||
, "couch": false
|
||||
, "devel": false
|
||||
, "dojo": false
|
||||
, "mootools": false
|
||||
, "node": true
|
||||
, "nonstandard": true
|
||||
, "prototypejs": false
|
||||
, "rhino": false
|
||||
, "worker": true
|
||||
, "wsh": false
|
||||
, "nomen": false
|
||||
, "onevar": false
|
||||
, "passfail": false
|
||||
}
|
node_modules/errno/.travis.yml (generated, vendored): 19 deletions
@@ -1,19 +0,0 @@
sudo: false

language: node_js

node_js:
  - 14
  - 12
  - 10
  - 9
  - 8
  - 7
  - 6
  - 5
  - 4

arch:
  - amd64
  - ppc64le
node_modules/errno/README.md (generated, vendored): 145 deletions
@@ -1,145 +0,0 @@
# node-errno
|
||||
|
||||
> Better [libuv](https://github.com/libuv/libuv)/[Node.js](https://nodejs.org)/[io.js](https://iojs.org) error handling & reporting. Available in npm as *errno*.
|
||||
|
||||
[![npm](https://img.shields.io/npm/v/errno.svg)](https://www.npmjs.com/package/errno)
|
||||
[![Build Status](https://secure.travis-ci.org/rvagg/node-errno.png)](http://travis-ci.org/rvagg/node-errno)
|
||||
[![npm](https://img.shields.io/npm/dm/errno.svg)](https://www.npmjs.com/package/errno)
|
||||
|
||||
* [errno exposed](#errnoexposed)
|
||||
* [Custom errors](#customerrors)
|
||||
|
||||
<a name="errnoexposed"></a>
|
||||
## errno exposed
|
||||
|
||||
Ever find yourself needing more details about Node.js errors? Me too, so *node-errno* contains the errno mappings direct from libuv so you can use them in your code.
|
||||
|
||||
**By errno:**
|
||||
|
||||
```js
|
||||
require('errno').errno[3]
|
||||
// → {
|
||||
// "errno": 3,
|
||||
// "code": "EACCES",
|
||||
// "description": "permission denied"
|
||||
// }
|
||||
```
|
||||
|
||||
**By code:**
|
||||
|
||||
```js
|
||||
require('errno').code.ENOTEMPTY
|
||||
// → {
|
||||
// "errno": 53,
|
||||
// "code": "ENOTEMPTY",
|
||||
// "description": "directory not empty"
|
||||
// }
|
||||
```
|
||||
|
||||
**Make your errors more descriptive:**
|
||||
|
||||
```js
|
||||
var errno = require('errno')
|
||||
|
||||
function errmsg(err) {
|
||||
var str = 'Error: '
|
||||
// if it's a libuv error then get the description from errno
|
||||
if (errno.errno[err.errno])
|
||||
str += errno.errno[err.errno].description
|
||||
else
|
||||
str += err.message
|
||||
|
||||
// if it's a `fs` error then it'll have a 'path' property
|
||||
if (err.path)
|
||||
str += ' [' + err.path + ']'
|
||||
|
||||
return str
|
||||
}
|
||||
|
||||
var fs = require('fs')
|
||||
|
||||
fs.readFile('thisisnotarealfile.txt', function (err, data) {
|
||||
if (err)
|
||||
console.log(errmsg(err))
|
||||
})
|
||||
```
|
||||
|
||||
**Use as a command line tool:**
|
||||
|
||||
```
|
||||
~ $ errno 53
|
||||
{
|
||||
"errno": 53,
|
||||
"code": "ENOTEMPTY",
|
||||
"description": "directory not empty"
|
||||
}
|
||||
~ $ errno EROFS
|
||||
{
|
||||
"errno": 56,
|
||||
"code": "EROFS",
|
||||
"description": "read-only file system"
|
||||
}
|
||||
~ $ errno foo
|
||||
No such errno/code: "foo"
|
||||
```
|
||||
|
||||
Supply no arguments for the full list. Error codes are processed case-insensitive.
|
||||
|
||||
You will need to install with `npm install errno -g` if you want the `errno` command to be available without supplying a full path to the node_modules installation.
|
||||
|
||||
<a name="customerrors"></a>
|
||||
## Custom errors
|
||||
|
||||
Use `errno.custom.createError()` to create custom `Error` objects to throw around in your Node.js library. Create error hierarchies so `instanceof` becomes a useful tool in tracking errors. Call-stack is correctly captured at the time you create an instance of the error object, plus a `cause` property will make available the original error object if you pass one in to the constructor.
|
||||
|
||||
```js
|
||||
var create = require('errno').custom.createError
|
||||
var MyError = create('MyError') // inherits from Error
|
||||
var SpecificError = create('SpecificError', MyError) // inherits from MyError
|
||||
var OtherError = create('OtherError', MyError)
|
||||
|
||||
// use them!
|
||||
if (condition) throw new SpecificError('Eeek! Something bad happened')
|
||||
|
||||
if (err) return callback(new OtherError(err))
|
||||
```
|
||||
|
||||
Also available is a `errno.custom.FilesystemError` with in-built access to errno properties:
|
||||
|
||||
```js
|
||||
fs.readFile('foo', function (err, data) {
|
||||
if (err) return callback(new errno.custom.FilesystemError(err))
|
||||
// do something else
|
||||
})
|
||||
```
|
||||
|
||||
The resulting error object passed through the callback will have the following properties: `code`, `errno`, `path` and `message` will contain a descriptive human-readable message.
|
||||
|
||||
## Contributors
|
||||
|
||||
* [bahamas10](https://github.com/bahamas10) (Dave Eddy) - Added CLI
|
||||
* [ralphtheninja](https://github.com/ralphtheninja) (Lars-Magnus Skog)
|
||||
|
||||
## Copyright & Licence
|
||||
|
||||
*Copyright (c) 2012-2015 [Rod Vagg](https://github.com/rvagg) ([@rvagg](https://twitter.com/rvagg))*
|
||||
|
||||
Made available under the MIT licence:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is furnished
|
||||
to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
node_modules/errno/build.js (generated, vendored): 43 deletions
@@ -1,43 +0,0 @@
#!/usr/bin/env node

var request = require('request')
  , fs = require('fs')

  , uvheadloc = 'https://raw.github.com/joyent/libuv/master/include/uv.h'
  , defreg = /^\s*XX\(\s*([\-\d]+),\s*([A-Z]+),\s*"([^"]*)"\s*\)\s*\\?$/


request(uvheadloc, function (err, response) {
  if (err)
    throw err

  var data, out

  data = response.body
    .split('\n')
    .map(function (line) { return line.match(defreg) })
    .filter(function (match) { return match })
    .map(function (match) { return {
        errno: parseInt(match[1], 10)
      , code: match[2]
      , description: match[3]
    }})

  out = 'var all = module.exports.all = ' + JSON.stringify(data, 0, 1) + '\n\n'

  out += '\nmodule.exports.errno = {\n '
    + data.map(function (e, i) {
        return '\'' + e.errno + '\': all[' + i + ']'
      }).join('\n , ')
    + '\n}\n\n'

  out += '\nmodule.exports.code = {\n '
    + data.map(function (e, i) {
        return '\'' + e.code + '\': all[' + i + ']'
      }).join('\n , ')
    + '\n}\n\n'

  out += '\nmodule.exports.custom = require("./custom")(module.exports)\n'

  fs.writeFile('errno.js', out)
})
node_modules/errno/cli.js (generated, vendored): 22 deletions
@@ -1,22 +0,0 @@
#!/usr/bin/env node

var errno = require('./')
  , arg = process.argv[2]
  , data, code

if (arg === undefined) {
  console.log(JSON.stringify(errno.code, null, 2))
  process.exit(0)
}

if ((code = +arg) == arg)
  data = errno.errno[code]
else
  data = errno.code[arg] || errno.code[arg.toUpperCase()]

if (data)
  console.log(JSON.stringify(data, null, 2))
else {
  console.error('No such errno/code: "' + arg + '"')
  process.exit(1)
}
node_modules/errno/custom.js (generated, vendored): 57 deletions
@@ -1,57 +0,0 @@
var prr = require('prr')
|
||||
|
||||
function init (type, message, cause) {
|
||||
if (!!message && typeof message != 'string') {
|
||||
message = message.message || message.name
|
||||
}
|
||||
prr(this, {
|
||||
type : type
|
||||
, name : type
|
||||
// can be passed just a 'cause'
|
||||
, cause : typeof message != 'string' ? message : cause
|
||||
, message : message
|
||||
}, 'ewr')
|
||||
}
|
||||
|
||||
// generic prototype, not intended to be actually used - helpful for `instanceof`
|
||||
function CustomError (message, cause) {
|
||||
Error.call(this)
|
||||
if (Error.captureStackTrace)
|
||||
Error.captureStackTrace(this, this.constructor)
|
||||
init.call(this, 'CustomError', message, cause)
|
||||
}
|
||||
|
||||
CustomError.prototype = new Error()
|
||||
|
||||
function createError (errno, type, proto) {
|
||||
var err = function (message, cause) {
|
||||
init.call(this, type, message, cause)
|
||||
//TODO: the specificity here is stupid, errno should be available everywhere
|
||||
if (type == 'FilesystemError') {
|
||||
this.code = this.cause.code
|
||||
this.path = this.cause.path
|
||||
this.errno = this.cause.errno
|
||||
this.message =
|
||||
(errno.errno[this.cause.errno]
|
||||
? errno.errno[this.cause.errno].description
|
||||
: this.cause.message)
|
||||
+ (this.cause.path ? ' [' + this.cause.path + ']' : '')
|
||||
}
|
||||
Error.call(this)
|
||||
if (Error.captureStackTrace)
|
||||
Error.captureStackTrace(this, err)
|
||||
}
|
||||
err.prototype = !!proto ? new proto() : new CustomError()
|
||||
return err
|
||||
}
|
||||
|
||||
module.exports = function (errno) {
|
||||
var ce = function (type, proto) {
|
||||
return createError(errno, type, proto)
|
||||
}
|
||||
return {
|
||||
CustomError : CustomError
|
||||
, FilesystemError : ce('FilesystemError')
|
||||
, createError : ce
|
||||
}
|
||||
}
|
node_modules/errno/errno.js (generated, vendored): 313 deletions
@@ -1,313 +0,0 @@
var all = module.exports.all = [
|
||||
{
|
||||
errno: -2,
|
||||
code: 'ENOENT',
|
||||
description: 'no such file or directory'
|
||||
},
|
||||
{
|
||||
errno: -1,
|
||||
code: 'UNKNOWN',
|
||||
description: 'unknown error'
|
||||
},
|
||||
{
|
||||
errno: 0,
|
||||
code: 'OK',
|
||||
description: 'success'
|
||||
},
|
||||
{
|
||||
errno: 1,
|
||||
code: 'EOF',
|
||||
description: 'end of file'
|
||||
},
|
||||
{
|
||||
errno: 2,
|
||||
code: 'EADDRINFO',
|
||||
description: 'getaddrinfo error'
|
||||
},
|
||||
{
|
||||
errno: 3,
|
||||
code: 'EACCES',
|
||||
description: 'permission denied'
|
||||
},
|
||||
{
|
||||
errno: 4,
|
||||
code: 'EAGAIN',
|
||||
description: 'resource temporarily unavailable'
|
||||
},
|
||||
{
|
||||
errno: 5,
|
||||
code: 'EADDRINUSE',
|
||||
description: 'address already in use'
|
||||
},
|
||||
{
|
||||
errno: 6,
|
||||
code: 'EADDRNOTAVAIL',
|
||||
description: 'address not available'
|
||||
},
|
||||
{
|
||||
errno: 7,
|
||||
code: 'EAFNOSUPPORT',
|
||||
description: 'address family not supported'
|
||||
},
|
||||
{
|
||||
errno: 8,
|
||||
code: 'EALREADY',
|
||||
description: 'connection already in progress'
|
||||
},
|
||||
{
|
||||
errno: 9,
|
||||
code: 'EBADF',
|
||||
description: 'bad file descriptor'
|
||||
},
|
||||
{
|
||||
errno: 10,
|
||||
code: 'EBUSY',
|
||||
description: 'resource busy or locked'
|
||||
},
|
||||
{
|
||||
errno: 11,
|
||||
code: 'ECONNABORTED',
|
||||
description: 'software caused connection abort'
|
||||
},
|
||||
{
|
||||
errno: 12,
|
||||
code: 'ECONNREFUSED',
|
||||
description: 'connection refused'
|
||||
},
|
||||
{
|
||||
errno: 13,
|
||||
code: 'ECONNRESET',
|
||||
description: 'connection reset by peer'
|
||||
},
|
||||
{
|
||||
errno: 14,
|
||||
code: 'EDESTADDRREQ',
|
||||
description: 'destination address required'
|
||||
},
|
||||
{
|
||||
errno: 15,
|
||||
code: 'EFAULT',
|
||||
description: 'bad address in system call argument'
|
||||
},
|
||||
{
|
||||
errno: 16,
|
||||
code: 'EHOSTUNREACH',
|
||||
description: 'host is unreachable'
|
||||
},
|
||||
{
|
||||
errno: 17,
|
||||
code: 'EINTR',
|
||||
description: 'interrupted system call'
|
||||
},
|
||||
{
|
||||
errno: 18,
|
||||
code: 'EINVAL',
|
||||
description: 'invalid argument'
|
||||
},
|
||||
{
|
||||
errno: 19,
|
||||
code: 'EISCONN',
|
||||
description: 'socket is already connected'
|
||||
},
|
||||
{
|
||||
errno: 20,
|
||||
code: 'EMFILE',
|
||||
description: 'too many open files'
|
||||
},
|
||||
{
|
||||
errno: 21,
|
||||
code: 'EMSGSIZE',
|
||||
description: 'message too long'
|
||||
},
|
||||
{
|
||||
errno: 22,
|
||||
code: 'ENETDOWN',
|
||||
description: 'network is down'
|
||||
},
|
||||
{
|
||||
errno: 23,
|
||||
code: 'ENETUNREACH',
|
||||
description: 'network is unreachable'
|
||||
},
|
||||
{
|
||||
errno: 24,
|
||||
code: 'ENFILE',
|
||||
description: 'file table overflow'
|
||||
},
|
||||
{
|
||||
errno: 25,
|
||||
code: 'ENOBUFS',
|
||||
description: 'no buffer space available'
|
||||
},
|
||||
{
|
||||
errno: 26,
|
||||
code: 'ENOMEM',
|
||||
description: 'not enough memory'
|
||||
},
|
||||
{
|
||||
errno: 27,
|
||||
code: 'ENOTDIR',
|
||||
description: 'not a directory'
|
||||
},
|
||||
{
|
||||
errno: 28,
|
||||
code: 'EISDIR',
|
||||
description: 'illegal operation on a directory'
|
||||
},
|
||||
{
|
||||
errno: 29,
|
||||
code: 'ENONET',
|
||||
description: 'machine is not on the network'
|
||||
},
|
||||
{
|
||||
errno: 31,
|
||||
code: 'ENOTCONN',
|
||||
description: 'socket is not connected'
|
||||
},
|
||||
{
|
||||
errno: 32,
|
||||
code: 'ENOTSOCK',
|
||||
description: 'socket operation on non-socket'
|
||||
},
|
||||
{
|
||||
errno: 33,
|
||||
code: 'ENOTSUP',
|
||||
description: 'operation not supported on socket'
|
||||
},
|
||||
{
|
||||
errno: 34,
|
||||
code: 'ENOENT',
|
||||
description: 'no such file or directory'
|
||||
},
|
||||
{
|
||||
errno: 35,
|
||||
code: 'ENOSYS',
|
||||
description: 'function not implemented'
|
||||
},
|
||||
{
|
||||
errno: 36,
|
||||
code: 'EPIPE',
|
||||
description: 'broken pipe'
|
||||
},
|
||||
{
|
||||
errno: 37,
|
||||
code: 'EPROTO',
|
||||
description: 'protocol error'
|
||||
},
|
||||
{
|
||||
errno: 38,
|
||||
code: 'EPROTONOSUPPORT',
|
||||
description: 'protocol not supported'
|
||||
},
|
||||
{
|
||||
errno: 39,
|
||||
code: 'EPROTOTYPE',
|
||||
description: 'protocol wrong type for socket'
|
||||
},
|
||||
{
|
||||
errno: 40,
|
||||
code: 'ETIMEDOUT',
|
||||
description: 'connection timed out'
|
||||
},
|
||||
{
|
||||
errno: 41,
|
||||
code: 'ECHARSET',
|
||||
description: 'invalid Unicode character'
|
||||
},
|
||||
{
|
||||
errno: 42,
|
||||
code: 'EAIFAMNOSUPPORT',
|
||||
description: 'address family for hostname not supported'
|
||||
},
|
||||
{
|
||||
errno: 44,
|
||||
code: 'EAISERVICE',
|
||||
description: 'servname not supported for ai_socktype'
|
||||
},
|
||||
{
|
||||
errno: 45,
|
||||
code: 'EAISOCKTYPE',
|
||||
description: 'ai_socktype not supported'
|
||||
},
|
||||
{
|
||||
errno: 46,
|
||||
code: 'ESHUTDOWN',
|
||||
description: 'cannot send after transport endpoint shutdown'
|
||||
},
|
||||
{
|
||||
errno: 47,
|
||||
code: 'EEXIST',
|
||||
description: 'file already exists'
|
||||
},
|
||||
{
|
||||
errno: 48,
|
||||
code: 'ESRCH',
|
||||
description: 'no such process'
|
||||
},
|
||||
{
|
||||
errno: 49,
|
||||
code: 'ENAMETOOLONG',
|
||||
description: 'name too long'
|
||||
},
|
||||
{
|
||||
errno: 50,
|
||||
code: 'EPERM',
|
||||
description: 'operation not permitted'
|
||||
},
|
||||
{
|
||||
errno: 51,
|
||||
code: 'ELOOP',
|
||||
description: 'too many symbolic links encountered'
|
||||
},
|
||||
{
|
||||
errno: 52,
|
||||
code: 'EXDEV',
|
||||
description: 'cross-device link not permitted'
|
||||
},
|
||||
{
|
||||
errno: 53,
|
||||
code: 'ENOTEMPTY',
|
||||
description: 'directory not empty'
|
||||
},
|
||||
{
|
||||
errno: 54,
|
||||
code: 'ENOSPC',
|
||||
description: 'no space left on device'
|
||||
},
|
||||
{
|
||||
errno: 55,
|
||||
code: 'EIO',
|
||||
description: 'i/o error'
|
||||
},
|
||||
{
|
||||
errno: 56,
|
||||
code: 'EROFS',
|
||||
description: 'read-only file system'
|
||||
},
|
||||
{
|
||||
errno: 57,
|
||||
code: 'ENODEV',
|
||||
description: 'no such device'
|
||||
},
|
||||
{
|
||||
errno: 58,
|
||||
code: 'ESPIPE',
|
||||
description: 'invalid seek'
|
||||
},
|
||||
{
|
||||
errno: 59,
|
||||
code: 'ECANCELED',
|
||||
description: 'operation canceled'
|
||||
}
|
||||
]
|
||||
|
||||
module.exports.errno = {}
|
||||
module.exports.code = {}
|
||||
|
||||
all.forEach(function (error) {
|
||||
module.exports.errno[error.errno] = error
|
||||
module.exports.code[error.code] = error
|
||||
})
|
||||
|
||||
module.exports.custom = require('./custom')(module.exports)
|
||||
module.exports.create = module.exports.custom.createError
|
node_modules/errno/package.json (generated, vendored): 33 deletions
@@ -1,33 +0,0 @@
{
  "name": "errno",
  "authors": [
    "Rod Vagg @rvagg <rod@vagg.org> (https://github.com/rvagg)"
  ],
  "description": "libuv errno details exposed",
  "keywords": [
    "errors",
    "errno",
    "libuv"
  ],
  "version": "0.1.8",
  "main": "errno.js",
  "dependencies": {
    "prr": "~1.0.1"
  },
  "bin": {
    "errno": "./cli.js"
  },
  "devDependencies": {
    "error-stack-parser": "^2.0.1",
    "inherits": "^2.0.3",
    "tape": "~4.8.0"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/rvagg/node-errno.git"
  },
  "license": "MIT",
  "scripts": {
    "test": "node --use_strict test.js"
  }
}
node_modules/errno/test.js (generated, vendored): 88 deletions
@@ -1,88 +0,0 @@
var test = require('tape')
|
||||
, inherits = require('inherits')
|
||||
, ErrorStackParser = require('error-stack-parser')
|
||||
, errno = require('./')
|
||||
|
||||
test('sanity checks', function (t) {
|
||||
t.ok(errno.all, 'errno.all not found')
|
||||
t.ok(errno.errno, 'errno.errno not found')
|
||||
t.ok(errno.code, 'errno.code not found')
|
||||
|
||||
t.equal(errno.all.length, 60, 'found ' + errno.all.length + ', expected 60')
|
||||
t.equal(errno.errno['-1'], errno.all[1], 'errno -1 not second element')
|
||||
|
||||
t.equal(errno.code['UNKNOWN'], errno.all[1], 'code UNKNOWN not second element')
|
||||
|
||||
t.equal(errno.errno[1], errno.all[3], 'errno 1 not fourth element')
|
||||
|
||||
t.equal(errno.code['EOF'], errno.all[3], 'code EOF not fourth element')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('custom errors', function (t) {
|
||||
const Cust = errno.create('FooNotBarError')
|
||||
const cust = new Cust('foo is not bar')
|
||||
|
||||
t.equal(cust.name, 'FooNotBarError', 'correct custom name')
|
||||
t.equal(cust.type, 'FooNotBarError', 'correct custom type')
|
||||
t.equal(cust.message, 'foo is not bar', 'correct custom message')
|
||||
t.notOk(cust.cause, 'no cause')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('callstack', function (t) {
|
||||
const MyError = errno.create('MyError')
|
||||
|
||||
function lastFunction (ErrorType, cb) {
|
||||
process.nextTick(cb, new ErrorType('oh noes!'))
|
||||
}
|
||||
|
||||
function secondLastFunction (ErrorType, cb) {
|
||||
lastFunction(ErrorType, cb)
|
||||
}
|
||||
|
||||
function testFrames (t) {
|
||||
return function (err) {
|
||||
const stack = ErrorStackParser.parse(err)
|
||||
t.same(stack[0].functionName, 'lastFunction', 'last stack frame ok')
|
||||
t.same(stack[1].functionName, 'secondLastFunction', 'second last stack frame ok')
|
||||
t.end()
|
||||
}
|
||||
}
|
||||
|
||||
t.test('custom error, default prototype', function (t) {
|
||||
secondLastFunction(MyError, testFrames(t))
|
||||
})
|
||||
|
||||
t.test('custom error, custom prototype', function (t) {
|
||||
const MyError2 = errno.create('MyError2', MyError)
|
||||
secondLastFunction(MyError2, testFrames(t))
|
||||
})
|
||||
|
||||
t.test('custom error, using inheritance', function (t) {
|
||||
const CustomError = errno.custom.CustomError
|
||||
|
||||
function MyError3 (message, cause) {
|
||||
CustomError.call(this, message, cause)
|
||||
}
|
||||
|
||||
inherits(MyError3, CustomError)
|
||||
|
||||
secondLastFunction(MyError3, testFrames(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('error without message', function (t) {
|
||||
const Cust = errno.create('WriteError')
|
||||
const cust = new Cust({
|
||||
code: 22,
|
||||
message: '',
|
||||
name: 'QuotaExceededError'
|
||||
})
|
||||
|
||||
t.equal(cust.name, 'WriteError', 'correct custom name')
|
||||
t.equal(cust.type, 'WriteError', 'correct custom type')
|
||||
t.equal(cust.message, 'QuotaExceededError', 'message is the name')
|
||||
t.notOk(cust.cause, 'no cause')
|
||||
t.end()
|
||||
})
|
node_modules/image-size/LICENSE (generated, vendored): 9 deletions
@@ -1,9 +0,0 @@
The MIT License (MIT)

Copyright © 2017 Aditya Yadav, http://netroy.in

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/image-size/Readme.md (generated, vendored): 88 deletions
@@ -1,88 +0,0 @@
# image-size
|
||||
|
||||
[![NPM Version](https://img.shields.io/npm/v/image-size.svg)](https://www.npmjs.com/package/image-size)
|
||||
[![Build Status](https://travis-ci.org/image-size/image-size.svg?branch=master)](https://travis-ci.org/image-size/image-size)
|
||||
[![NPM Downloads](https://img.shields.io/npm/dm/image-size.svg)](http://npm-stat.com/charts.html?package=image-size&author=&from=&to=)
|
||||
[![Coverage Status](https://img.shields.io/coveralls/image-size/image-size/master.svg)](https://coveralls.io/github/image-size/image-size?branch=master)
|
||||
[![devDependency Status](https://david-dm.org/image-size/image-size/dev-status.svg)](https://david-dm.org/image-size/image-size#info=devDependencies)
|
||||
|
||||
A [Node](https://nodejs.org/en/) module to get dimensions of any image file
|
||||
|
||||
## Supported formats
|
||||
|
||||
* BMP
|
||||
* GIF
|
||||
* JPEG
|
||||
* PNG
|
||||
* PSD
|
||||
* TIFF
|
||||
* WebP
|
||||
* SVG
|
||||
* DDS
|
||||
|
||||
### Upcoming
|
||||
|
||||
* SWF
|
||||
|
||||
## Programmatic Usage
|
||||
|
||||
```
|
||||
npm install image-size --save
|
||||
```
|
||||
|
||||
### Synchronous
|
||||
|
||||
```javascript
|
||||
var sizeOf = require('image-size');
|
||||
var dimensions = sizeOf('images/funny-cats.png');
|
||||
console.log(dimensions.width, dimensions.height);
|
||||
```
|
||||
|
||||
### Asynchronous
|
||||
|
||||
```javascript
|
||||
var sizeOf = require('image-size');
|
||||
sizeOf('images/funny-cats.png', function (err, dimensions) {
|
||||
console.log(dimensions.width, dimensions.height);
|
||||
});
|
||||
```
|
||||
NOTE: The asynchronous version doesn't work if the input is a Buffer. Use synchronous version instead.
|
||||
|
||||
### Using a URL
|
||||
|
||||
```javascript
|
||||
var url = require('url');
|
||||
var http = require('http');
|
||||
|
||||
var sizeOf = require('image-size');
|
||||
|
||||
var imgUrl = 'http://my-amazing-website.com/image.jpeg';
|
||||
var options = url.parse(imgUrl);
|
||||
|
||||
http.get(options, function (response) {
|
||||
var chunks = [];
|
||||
response.on('data', function (chunk) {
|
||||
chunks.push(chunk);
|
||||
}).on('end', function() {
|
||||
var buffer = Buffer.concat(chunks);
|
||||
console.log(sizeOf(buffer));
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
You can optionally check the buffer lengths & stop downloading the image after a few kilobytes.
|
||||
**You don't need to download the entire image**
|
||||
|
||||
## Command-Line Usage (CLI)
|
||||
|
||||
```
|
||||
npm install image-size --global
|
||||
image-size image1 [image2] [image3] ...
|
||||
```
|
||||
|
||||
## Credits
|
||||
|
||||
not a direct port, but an attempt to have something like
|
||||
[dabble's imagesize](https://github.com/dabble/imagesize/blob/master/lib/image_size.rb) as a node module.
|
||||
|
||||
## [Contributors](Contributors.md)
|
node_modules/image-size/bin/image-size.js (generated, vendored): 36 deletions
@@ -1,36 +0,0 @@
#!/usr/bin/env node
'use strict';

var fs = require('fs');
var path = require('path');

var imageSize = require('..');

var files = process.argv.slice(2);

if (!files.length) {
  console.error('Usage: image-size image1 [image2] [image3] ...');
  process.exit(-1);
}

var red = ['\x1B[31m', '\x1B[39m'];
// var bold = ['\x1B[1m', '\x1B[22m'];
var grey = ['\x1B[90m', '\x1B[39m'];
var green = ['\x1B[32m', '\x1B[39m'];

files.forEach(function (image) {
  try {
    if (fs.existsSync(path.resolve(image))) {
      var size = imageSize(image);
      var label = green[0] + size.width + green[1] +
                  grey[0] + 'x' + grey[1] +
                  green[0] + size.height + green[1];
      console.info(label, '-', grey[0] + image + grey[1]);
    } else {
      console.error('file doesn\'t exist - ', image);
    }
  } catch (e) {
    // console.error(e.stack);
    console.error(red[0] + e.message + red[1], '-', image);
  }
});
node_modules/image-size/lib/detector.js (generated, vendored): 19 deletions
@@ -1,19 +0,0 @@
'use strict';

var typeMap = {};
var types = require('./types');

// load all available handlers
types.forEach(function (type) {
  typeMap[type] = require('./types/' + type).detect;
});

module.exports = function (buffer, filepath) {
  var type, result;
  for (type in typeMap) {
    result = typeMap[type](buffer, filepath);
    if (result) {
      return type;
    }
  }
};
node_modules/image-size/lib/index.js (generated, vendored): 105 deletions
@@ -1,105 +0,0 @@
'use strict';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
var detector = require('./detector');
|
||||
|
||||
var handlers = {};
|
||||
var types = require('./types');
|
||||
|
||||
// load all available handlers
|
||||
types.forEach(function (type) {
|
||||
handlers[type] = require('./types/' + type);
|
||||
});
|
||||
|
||||
// Maximum buffer size, with a default of 128 kilobytes.
|
||||
// TO-DO: make this adaptive based on the initial signature of the image
|
||||
var MaxBufferSize = 128*1024;
|
||||
|
||||
function lookup (buffer, filepath) {
|
||||
// detect the file type.. don't rely on the extension
|
||||
var type = detector(buffer, filepath);
|
||||
|
||||
// find an appropriate handler for this file type
|
||||
if (type in handlers) {
|
||||
var size = handlers[type].calculate(buffer, filepath);
|
||||
if (size !== false) {
|
||||
size.type = type;
|
||||
return size;
|
||||
}
|
||||
}
|
||||
|
||||
// throw up, if we don't understand the file
|
||||
throw new TypeError('unsupported file type: ' + type + ' (file: ' + filepath + ')');
|
||||
}
|
||||
|
||||
function asyncFileToBuffer (filepath, callback) {
|
||||
// open the file in read only mode
|
||||
fs.open(filepath, 'r', function (err, descriptor) {
|
||||
if (err) { return callback(err); }
|
||||
var size = fs.fstatSync(descriptor).size;
|
||||
if (size <= 0){return callback(new Error("File size is not greater than 0 —— " + filepath)); }
|
||||
var bufferSize = Math.min(size, MaxBufferSize);
|
||||
var buffer = new Buffer(bufferSize);
|
||||
// read first buffer block from the file, asynchronously
|
||||
fs.read(descriptor, buffer, 0, bufferSize, 0, function (err) {
|
||||
if (err) { return callback(err); }
|
||||
// close the file, we are done
|
||||
fs.close(descriptor, function (err) {
|
||||
callback(err, buffer);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function syncFileToBuffer (filepath) {
|
||||
// read from the file, synchronously
|
||||
var descriptor = fs.openSync(filepath, 'r');
|
||||
var size = fs.fstatSync(descriptor).size;
|
||||
var bufferSize = Math.min(size, MaxBufferSize);
|
||||
var buffer = new Buffer(bufferSize);
|
||||
fs.readSync(descriptor, buffer, 0, bufferSize, 0);
|
||||
fs.closeSync(descriptor);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @params input - buffer or relative/absolute path of the image file
|
||||
* @params callback - optional function for async detection
|
||||
*/
|
||||
module.exports = function (input, callback) {
|
||||
|
||||
// Handle buffer input
|
||||
if (Buffer.isBuffer(input)) {
|
||||
return lookup(input);
|
||||
}
|
||||
|
||||
// input should be a string at this point
|
||||
if (typeof input !== 'string') {
|
||||
throw new TypeError('invalid invocation');
|
||||
}
|
||||
|
||||
// resolve the file path
|
||||
var filepath = path.resolve(input);
|
||||
|
||||
if (typeof callback === 'function') {
|
||||
asyncFileToBuffer(filepath, function (err, buffer) {
|
||||
if (err) { return callback(err); }
|
||||
|
||||
// return the dimensions
|
||||
var dimensions;
|
||||
try {
|
||||
dimensions = lookup(buffer, filepath);
|
||||
} catch (e) {
|
||||
err = e;
|
||||
}
|
||||
callback(err, dimensions);
|
||||
});
|
||||
} else {
|
||||
var buffer = syncFileToBuffer(filepath);
|
||||
return lookup(buffer, filepath);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.types = types;
|
node_modules/image-size/lib/readUInt.js (generated, vendored): 11 deletions
@@ -1,11 +0,0 @@
'use strict';

// Abstract reading multi-byte unsigned integers
function readUInt (buffer, bits, offset, isBigEndian) {
  offset = offset || 0;
  var endian = !!isBigEndian ? 'BE' : 'LE';
  var method = buffer['readUInt' + bits + endian];
  return method.call(buffer, offset);
}

module.exports = readUInt;
node_modules/image-size/lib/types.js (generated, vendored): 13 deletions
@@ -1,13 +0,0 @@
'use strict';

module.exports = [
  'bmp',
  'gif',
  'jpg',
  'png',
  'psd',
  'svg',
  'tiff',
  'webp',
  'dds'
];
node_modules/image-size/lib/types/bmp.js (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
'use strict';

function isBMP (buffer) {
  return ('BM' === buffer.toString('ascii', 0, 2));
}

function calculate (buffer) {
  return {
    'width': buffer.readUInt32LE(18),
    'height': Math.abs(buffer.readInt32LE(22))
  };
}

module.exports = {
  'detect': isBMP,
  'calculate': calculate
};
node_modules/image-size/lib/types/dds.js (generated, vendored): 18 deletions
@@ -1,18 +0,0 @@
'use strict';

function isDDS(buffer){
  return buffer.readUInt32LE(0) === 0x20534444;
}

function calculate(buffer){
  // read file resolution metadata
  return {
    'height': buffer.readUInt32LE(12),
    'width': buffer.readUInt32LE(16)
  };
}

module.exports = {
  'detect': isDDS,
  'calculate': calculate
};
node_modules/image-size/lib/types/gif.js (generated, vendored): 19 deletions
@@ -1,19 +0,0 @@
'use strict';

var gifRegexp = /^GIF8[79]a/;
function isGIF (buffer) {
  var signature = buffer.toString('ascii', 0, 6);
  return (gifRegexp.test(signature));
}

function calculate(buffer) {
  return {
    'width': buffer.readUInt16LE(6),
    'height': buffer.readUInt16LE(8)
  };
}

module.exports = {
  'detect': isGIF,
  'calculate': calculate
};
node_modules/image-size/lib/types/jpg.js (generated, vendored): 62 deletions
@@ -1,62 +0,0 @@
'use strict';
|
||||
|
||||
// NOTE: we only support baseline and progressive JPGs here
|
||||
// due to the structure of the loader class, we only get a buffer
|
||||
// with a maximum size of 4096 bytes. so if the SOF marker is outside
|
||||
// if this range we can't detect the file size correctly.
|
||||
|
||||
function isJPG (buffer) { //, filepath
|
||||
var SOIMarker = buffer.toString('hex', 0, 2);
|
||||
return ('ffd8' === SOIMarker);
|
||||
}
|
||||
|
||||
function extractSize (buffer, i) {
|
||||
return {
|
||||
'height' : buffer.readUInt16BE(i),
|
||||
'width' : buffer.readUInt16BE(i + 2)
|
||||
};
|
||||
}
|
||||
|
||||
function validateBuffer (buffer, i) {
|
||||
// index should be within buffer limits
|
||||
if (i > buffer.length) {
|
||||
throw new TypeError('Corrupt JPG, exceeded buffer limits');
|
||||
}
|
||||
// Every JPEG block must begin with a 0xFF
|
||||
if (buffer[i] !== 0xFF) {
|
||||
throw new TypeError('Invalid JPG, marker table corrupted');
|
||||
}
|
||||
}
|
||||
|
||||
function calculate (buffer) {
|
||||
|
||||
// Skip 4 chars, they are for signature
|
||||
buffer = buffer.slice(4);
|
||||
|
||||
var i, next;
|
||||
while (buffer.length) {
|
||||
// read length of the next block
|
||||
i = buffer.readUInt16BE(0);
|
||||
|
||||
// ensure correct format
|
||||
validateBuffer(buffer, i);
|
||||
|
||||
// 0xFFC0 is baseline standard(SOF)
|
||||
// 0xFFC1 is baseline optimized(SOF)
|
||||
// 0xFFC2 is progressive(SOF2)
|
||||
next = buffer[i + 1];
|
||||
if (next === 0xC0 || next === 0xC1 || next === 0xC2) {
|
||||
return extractSize(buffer, i + 5);
|
||||
}
|
||||
|
||||
// move to the next block
|
||||
buffer = buffer.slice(i + 2);
|
||||
}
|
||||
|
||||
throw new TypeError('Invalid JPG, no size found');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
'detect': isJPG,
|
||||
'calculate': calculate
|
||||
};
|
node_modules/image-size/lib/types/png.js (generated, vendored): 36 deletions
@@ -1,36 +0,0 @@
'use strict';

var pngSignature = 'PNG\r\n\x1a\n';
var pngImageHeaderChunkName = 'IHDR';
var pngFriedChunkName = 'CgBI'; // Used to detect "fried" png's: http://www.jongware.com/pngdefry.html

function isPNG (buffer) {
  if (pngSignature === buffer.toString('ascii', 1, 8)) {
    var chunkName = buffer.toString('ascii', 12, 16);
    if (chunkName === pngFriedChunkName) {
      chunkName = buffer.toString('ascii', 28, 32);
    }
    if (chunkName !== pngImageHeaderChunkName) {
      throw new TypeError('invalid png');
    }
    return true;
  }
}

function calculate (buffer) {
  if (buffer.toString('ascii', 12, 16) === pngFriedChunkName) {
    return {
      'width': buffer.readUInt32BE(32),
      'height': buffer.readUInt32BE(36)
    };
  }
  return {
    'width': buffer.readUInt32BE(16),
    'height': buffer.readUInt32BE(20)
  };
}

module.exports = {
  'detect': isPNG,
  'calculate': calculate
};
node_modules/image-size/lib/types/psd.js (generated, vendored): 17 deletions
@@ -1,17 +0,0 @@
'use strict';

function isPSD (buffer) {
  return ('8BPS' === buffer.toString('ascii', 0, 4));
}

function calculate (buffer) {
  return {
    'width': buffer.readUInt32BE(18),
    'height': buffer.readUInt32BE(14)
  };
}

module.exports = {
  'detect': isPSD,
  'calculate': calculate
};
node_modules/image-size/lib/types/svg.js (generated, vendored): 78 deletions
@@ -1,78 +0,0 @@
'use strict';
|
||||
|
||||
var svgReg = /<svg[^>]+[^>]*>/;
|
||||
function isSVG (buffer) {
|
||||
return svgReg.test(buffer);
|
||||
}
|
||||
|
||||
var extractorRegExps = {
|
||||
'root': /<svg\s[^>]+>/,
|
||||
'width': /\bwidth=(['"])([^%]+?)\1/,
|
||||
'height': /\bheight=(['"])([^%]+?)\1/,
|
||||
'viewbox': /\bviewBox=(['"])(.+?)\1/
|
||||
};
|
||||
|
||||
function parseViewbox (viewbox) {
|
||||
var bounds = viewbox.split(' ');
|
||||
return {
|
||||
'width': parseInt(bounds[2], 10),
|
||||
'height': parseInt(bounds[3], 10)
|
||||
};
|
||||
}
|
||||
|
||||
function parseAttributes (root) {
|
||||
var width = root.match(extractorRegExps.width);
|
||||
var height = root.match(extractorRegExps.height);
|
||||
var viewbox = root.match(extractorRegExps.viewbox);
|
||||
return {
|
||||
'width': width && parseInt(width[2], 10),
|
||||
'height': height && parseInt(height[2], 10),
|
||||
'viewbox': viewbox && parseViewbox(viewbox[2])
|
||||
};
|
||||
}
|
||||
|
||||
function calculateByDimensions (attrs) {
|
||||
return {
|
||||
'width': attrs.width,
|
||||
'height': attrs.height
|
||||
};
|
||||
}
|
||||
|
||||
function calculateByViewbox (attrs) {
|
||||
var ratio = attrs.viewbox.width / attrs.viewbox.height;
|
||||
if (attrs.width) {
|
||||
return {
|
||||
'width': attrs.width,
|
||||
'height': Math.floor(attrs.width / ratio)
|
||||
};
|
||||
}
|
||||
if (attrs.height) {
|
||||
return {
|
||||
'width': Math.floor(attrs.height * ratio),
|
||||
'height': attrs.height
|
||||
};
|
||||
}
|
||||
return {
|
||||
'width': attrs.viewbox.width,
|
||||
'height': attrs.viewbox.height
|
||||
};
|
||||
}
|
||||
|
||||
function calculate (buffer) {
|
||||
var root = buffer.toString('utf8').match(extractorRegExps.root);
|
||||
if (root) {
|
||||
var attrs = parseAttributes(root[0]);
|
||||
if (attrs.width && attrs.height) {
|
||||
return calculateByDimensions(attrs);
|
||||
}
|
||||
if (attrs.viewbox) {
|
||||
return calculateByViewbox(attrs);
|
||||
}
|
||||
}
|
||||
throw new TypeError('invalid svg');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
'detect': isSVG,
|
||||
'calculate': calculate
|
||||
};
|
node_modules/image-size/lib/types/tiff.js (generated, vendored): 118 deletions
@@ -1,118 +0,0 @@
'use strict';

// based on http://www.compix.com/fileformattif.htm
// TO-DO: support big-endian as well

var fs = require('fs');
var readUInt = require('../readUInt');

function isTIFF (buffer) {
  var hex4 = buffer.toString('hex', 0, 4);
  return ('49492a00' === hex4 || '4d4d002a' === hex4);
}

// Read IFD (image-file-directory) into a buffer
function readIFD (buffer, filepath, isBigEndian) {

  var ifdOffset = readUInt(buffer, 32, 4, isBigEndian);

  // read only till the end of the file
  var bufferSize = 1024;
  var fileSize = fs.statSync(filepath).size;
  if (ifdOffset + bufferSize > fileSize) {
    bufferSize = fileSize - ifdOffset - 10;
  }

  // populate the buffer
  var endBuffer = new Buffer(bufferSize);
  var descriptor = fs.openSync(filepath, 'r');
  fs.readSync(descriptor, endBuffer, 0, bufferSize, ifdOffset);

  // var ifdLength = readUInt(endBuffer, 16, 0, isBigEndian);
  var ifdBuffer = endBuffer.slice(2); //, 2 + 12 * ifdLength);
  return ifdBuffer;
}

// TIFF values seem to be messed up on Big-Endian, this helps
function readValue (buffer, isBigEndian) {
  var low = readUInt(buffer, 16, 8, isBigEndian);
  var high = readUInt(buffer, 16, 10, isBigEndian);
  return (high << 16) + low;
}

// move to the next tag
function nextTag (buffer) {
  if (buffer.length > 24) {
    return buffer.slice(12);
  }
}

// Extract IFD tags from TIFF metadata
function extractTags (buffer, isBigEndian) {
  var tags = {};
  var code, type, length;

  while (buffer && buffer.length) {
    code = readUInt(buffer, 16, 0, isBigEndian);
    type = readUInt(buffer, 16, 2, isBigEndian);
    length = readUInt(buffer, 32, 4, isBigEndian);

    // 0 means end of IFD
    if (code === 0) {
      break;
    } else {
      // 256 is width, 257 is height
      // if (code === 256 || code === 257) {
      if (length === 1 && (type === 3 || type === 4)) {
        tags[code] = readValue(buffer, isBigEndian);
      }

      // move to the next tag
      buffer = nextTag(buffer);
    }
  }
  return tags;
}

// Test if the TIFF is Big Endian or Little Endian
function determineEndianness (buffer) {
  var signature = buffer.toString('ascii', 0, 2);
  if ('II' === signature) {
    return 'LE';
  } else if ('MM' === signature) {
    return 'BE';
  }
}

function calculate (buffer, filepath) {

  if (!filepath) {
    throw new TypeError('Tiff doesn\'t support buffer');
  }

  // Determine BE/LE
  var isBigEndian = determineEndianness(buffer) === 'BE';

  // read the IFD
  var ifdBuffer = readIFD(buffer, filepath, isBigEndian);

  // extract the tags from the IFD
  var tags = extractTags(ifdBuffer, isBigEndian);

  var width = tags[256];
  var height = tags[257];

  if (!width || !height) {
    throw new TypeError('Invalid Tiff, missing tags');
  }

  return {
    'width': width,
    'height': height
  };
}

module.exports = {
  'detect': isTIFF,
  'calculate': calculate
};
69 node_modules/image-size/lib/types/webp.js generated vendored
@@ -1,69 +0,0 @@
'use strict';

// based on https://developers.google.com/speed/webp/docs/riff_container

function isWebP (buffer) {
  var riffHeader = 'RIFF' === buffer.toString('ascii', 0, 4);
  var webpHeader = 'WEBP' === buffer.toString('ascii', 8, 12);
  var vp8Header = 'VP8' === buffer.toString('ascii', 12, 15);
  return (riffHeader && webpHeader && vp8Header);
}

function calculate (buffer) {
  var chunkHeader = buffer.toString('ascii', 12, 16);
  buffer = buffer.slice(20, 30);

  // Extended webp stream signature
  if (chunkHeader === 'VP8X') {
    var extendedHeader = buffer[0];
    var validStart = (extendedHeader & 0xc0) === 0;
    var validEnd = (extendedHeader & 0x01) === 0;
    if (validStart && validEnd) {
      return calculateExtended(buffer);
    } else {
      return false;
    }
  }

  // Lossless webp stream signature
  if (chunkHeader === 'VP8 ' && buffer[0] !== 0x2f) {
    return calculateLossy(buffer);
  }

  // Lossy webp stream signature
  var signature = buffer.toString('hex', 3, 6);
  if (chunkHeader === 'VP8L' && signature !== '9d012a') {
    return calculateLossless(buffer);
  }

  return false;
}

function calculateExtended (buffer) {
  return {
    'width': 1 + buffer.readUIntLE(4, 3),
    'height': 1 + buffer.readUIntLE(7, 3)
  }
}

function calculateLossless (buffer) {
  return {
    'width': 1 + (((buffer[2] & 0x3F) << 8) | buffer[1]),
    'height': 1 + (((buffer[4] & 0xF) << 10) | (buffer[3] << 2) |
      ((buffer[2] & 0xC0) >> 6))
  };
}

function calculateLossy (buffer) {
  // `& 0x3fff` returns the last 14 bits
  // TO-DO: include webp scaling in the calculations
  return {
    'width': buffer.readInt16LE(6) & 0x3fff,
    'height': buffer.readInt16LE(8) & 0x3fff
  };
}

module.exports = {
  'detect': isWebP,
  'calculate': calculate
};
49 node_modules/image-size/package.json generated vendored
@@ -1,49 +0,0 @@
{
  "name": "image-size",
  "version": "0.5.5",
  "description": "get dimensions of any image file",
  "main": "lib/index.js",
  "files": [
    "bin",
    "lib"
  ],
  "engines": {
    "node": ">=0.10.0"
  },
  "bin": {
    "image-size": "bin/image-size.js"
  },
  "scripts": {
    "pretest": "jshint",
    "test": "mocha specs",
    "coverage": "istanbul cover _mocha specs"
  },
  "repository": "image-size/image-size",
  "keywords": [
    "image",
    "size",
    "dimensions",
    "resolution",
    "width",
    "height",
    "png",
    "jpeg",
    "bmp",
    "gif",
    "psd",
    "tiff",
    "webp",
    "svg"
  ],
  "author": "netroy <aditya@netroy.in> (http://netroy.in/)",
  "license": "MIT",
  "devDependencies": {
    "escomplex-js": "^1.2.0",
    "expect.js": "^0.3.1",
    "glob": "^7.1.1",
    "istanbul": "^1.1.0-alpha.1",
    "jshint": "^2.9.4",
    "mocha": "^3.4.1",
    "sinon": "^2.2.0"
  }
}
39 node_modules/make-dir/index.d.ts generated vendored
@@ -1,39 +0,0 @@
/// <reference types="node"/>
import * as fs from 'fs';

export interface Options {
  /**
   * Directory [permissions](https://x-team.com/blog/file-system-permissions-umask-node-js/).
   *
   * @default 0o777 & (~process.umask())
   */
  readonly mode?: number;

  /**
   * Use a custom `fs` implementation. For example [`graceful-fs`](https://github.com/isaacs/node-graceful-fs).
   *
   * Using a custom `fs` implementation will block the use of the native `recursive` option if `fs.mkdir` or `fs.mkdirSync` is not the native function.
   *
   * @default require('fs')
   */
  readonly fs?: typeof fs;
}

/**
 * Make a directory and its parents if needed - Think `mkdir -p`.
 *
 * @param path - Directory to create.
 * @returns A `Promise` for the path to the created directory.
 */
export default function makeDir(
  path: string,
  options?: Options
): Promise<string>;

/**
 * Synchronously make a directory and its parents if needed - Think `mkdir -p`.
 *
 * @param path - Directory to create.
 * @returns The path to the created directory.
 */
export function sync(path: string, options?: Options): string;
139
node_modules/make-dir/index.js
generated
vendored
139
node_modules/make-dir/index.js
generated
vendored
|
@ -1,139 +0,0 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const pify = require('pify');
|
||||
const semver = require('semver');
|
||||
|
||||
const defaults = {
|
||||
mode: 0o777 & (~process.umask()),
|
||||
fs
|
||||
};
|
||||
|
||||
const useNativeRecursiveOption = semver.satisfies(process.version, '>=10.12.0');
|
||||
|
||||
// https://github.com/nodejs/node/issues/8987
|
||||
// https://github.com/libuv/libuv/pull/1088
|
||||
const checkPath = pth => {
|
||||
if (process.platform === 'win32') {
|
||||
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''));
|
||||
|
||||
if (pathHasInvalidWinCharacters) {
|
||||
const error = new Error(`Path contains invalid characters: ${pth}`);
|
||||
error.code = 'EINVAL';
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const permissionError = pth => {
|
||||
// This replicates the exception of `fs.mkdir` with native the
|
||||
// `recusive` option when run on an invalid drive under Windows.
|
||||
const error = new Error(`operation not permitted, mkdir '${pth}'`);
|
||||
error.code = 'EPERM';
|
||||
error.errno = -4048;
|
||||
error.path = pth;
|
||||
error.syscall = 'mkdir';
|
||||
return error;
|
||||
};
|
||||
|
||||
const makeDir = (input, options) => Promise.resolve().then(() => {
|
||||
checkPath(input);
|
||||
options = Object.assign({}, defaults, options);
|
||||
|
||||
// TODO: Use util.promisify when targeting Node.js 8
|
||||
const mkdir = pify(options.fs.mkdir);
|
||||
const stat = pify(options.fs.stat);
|
||||
|
||||
if (useNativeRecursiveOption && options.fs.mkdir === fs.mkdir) {
|
||||
const pth = path.resolve(input);
|
||||
|
||||
return mkdir(pth, {
|
||||
mode: options.mode,
|
||||
recursive: true
|
||||
}).then(() => pth);
|
||||
}
|
||||
|
||||
const make = pth => {
|
||||
return mkdir(pth, options.mode)
|
||||
.then(() => pth)
|
||||
.catch(error => {
|
||||
if (error.code === 'EPERM') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (error.code === 'ENOENT') {
|
||||
if (path.dirname(pth) === pth) {
|
||||
throw permissionError(pth);
|
||||
}
|
||||
|
||||
if (error.message.includes('null bytes')) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return make(path.dirname(pth)).then(() => make(pth));
|
||||
}
|
||||
|
||||
return stat(pth)
|
||||
.then(stats => stats.isDirectory() ? pth : Promise.reject())
|
||||
.catch(() => {
|
||||
throw error;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
return make(path.resolve(input));
|
||||
});
|
||||
|
||||
module.exports = makeDir;
|
||||
module.exports.default = makeDir;
|
||||
|
||||
module.exports.sync = (input, options) => {
|
||||
checkPath(input);
|
||||
options = Object.assign({}, defaults, options);
|
||||
|
||||
if (useNativeRecursiveOption && options.fs.mkdirSync === fs.mkdirSync) {
|
||||
const pth = path.resolve(input);
|
||||
|
||||
fs.mkdirSync(pth, {
|
||||
mode: options.mode,
|
||||
recursive: true
|
||||
});
|
||||
|
||||
return pth;
|
||||
}
|
||||
|
||||
const make = pth => {
|
||||
try {
|
||||
options.fs.mkdirSync(pth, options.mode);
|
||||
} catch (error) {
|
||||
if (error.code === 'EPERM') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (error.code === 'ENOENT') {
|
||||
if (path.dirname(pth) === pth) {
|
||||
throw permissionError(pth);
|
||||
}
|
||||
|
||||
if (error.message.includes('null bytes')) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
make(path.dirname(pth));
|
||||
return make(pth);
|
||||
}
|
||||
|
||||
try {
|
||||
if (!options.fs.statSync(pth).isDirectory()) {
|
||||
throw new Error('The path is not a directory');
|
||||
}
|
||||
} catch (_) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return pth;
|
||||
};
|
||||
|
||||
return make(path.resolve(input));
|
||||
};
|
9
node_modules/make-dir/license
generated
vendored
9
node_modules/make-dir/license
generated
vendored
|
@ -1,9 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
68
node_modules/make-dir/node_modules/pify/index.js
generated
vendored
68
node_modules/make-dir/node_modules/pify/index.js
generated
vendored
|
@ -1,68 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const processFn = (fn, options) => function (...args) {
|
||||
const P = options.promiseModule;
|
||||
|
||||
return new P((resolve, reject) => {
|
||||
if (options.multiArgs) {
|
||||
args.push((...result) => {
|
||||
if (options.errorFirst) {
|
||||
if (result[0]) {
|
||||
reject(result);
|
||||
} else {
|
||||
result.shift();
|
||||
resolve(result);
|
||||
}
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
});
|
||||
} else if (options.errorFirst) {
|
||||
args.push((error, result) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
args.push(resolve);
|
||||
}
|
||||
|
||||
fn.apply(this, args);
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = (input, options) => {
|
||||
options = Object.assign({
|
||||
exclude: [/.+(Sync|Stream)$/],
|
||||
errorFirst: true,
|
||||
promiseModule: Promise
|
||||
}, options);
|
||||
|
||||
const objType = typeof input;
|
||||
if (!(input !== null && (objType === 'object' || objType === 'function'))) {
|
||||
throw new TypeError(`Expected \`input\` to be a \`Function\` or \`Object\`, got \`${input === null ? 'null' : objType}\``);
|
||||
}
|
||||
|
||||
const filter = key => {
|
||||
const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key);
|
||||
return options.include ? options.include.some(match) : !options.exclude.some(match);
|
||||
};
|
||||
|
||||
let ret;
|
||||
if (objType === 'function') {
|
||||
ret = function (...args) {
|
||||
return options.excludeMain ? input(...args) : processFn(input, options).apply(this, args);
|
||||
};
|
||||
} else {
|
||||
ret = Object.create(Object.getPrototypeOf(input));
|
||||
}
|
||||
|
||||
for (const key in input) { // eslint-disable-line guard-for-in
|
||||
const property = input[key];
|
||||
ret[key] = typeof property === 'function' && filter(key) ? processFn(property, options) : property;
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
9
node_modules/make-dir/node_modules/pify/license
generated
vendored
9
node_modules/make-dir/node_modules/pify/license
generated
vendored
|
@ -1,9 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
51
node_modules/make-dir/node_modules/pify/package.json
generated
vendored
51
node_modules/make-dir/node_modules/pify/package.json
generated
vendored
|
@ -1,51 +0,0 @@
|
|||
{
|
||||
"name": "pify",
|
||||
"version": "4.0.1",
|
||||
"description": "Promisify a callback-style function",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/pify",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava",
|
||||
"optimization-test": "node --allow-natives-syntax optimization-test.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"promise",
|
||||
"promises",
|
||||
"promisify",
|
||||
"all",
|
||||
"denodify",
|
||||
"denodeify",
|
||||
"callback",
|
||||
"cb",
|
||||
"node",
|
||||
"then",
|
||||
"thenify",
|
||||
"convert",
|
||||
"transform",
|
||||
"wrap",
|
||||
"wrapper",
|
||||
"bind",
|
||||
"to",
|
||||
"async",
|
||||
"await",
|
||||
"es2015",
|
||||
"bluebird"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^0.25.0",
|
||||
"pinkie-promise": "^2.0.0",
|
||||
"v8-natives": "^1.1.0",
|
||||
"xo": "^0.23.0"
|
||||
}
|
||||
}
|
145
node_modules/make-dir/node_modules/pify/readme.md
generated
vendored
145
node_modules/make-dir/node_modules/pify/readme.md
generated
vendored
|
@ -1,145 +0,0 @@
|
|||
# pify [![Build Status](https://travis-ci.org/sindresorhus/pify.svg?branch=master)](https://travis-ci.org/sindresorhus/pify)
|
||||
|
||||
> Promisify a callback-style function
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-pify?utm_source=npm-pify&utm_medium=referral&utm_campaign=readme">Get professional support for 'pify' with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install pify
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const fs = require('fs');
|
||||
const pify = require('pify');
|
||||
|
||||
// Promisify a single function
|
||||
pify(fs.readFile)('package.json', 'utf8').then(data => {
|
||||
console.log(JSON.parse(data).name);
|
||||
//=> 'pify'
|
||||
});
|
||||
|
||||
// Promisify all methods in a module
|
||||
pify(fs).readFile('package.json', 'utf8').then(data => {
|
||||
console.log(JSON.parse(data).name);
|
||||
//=> 'pify'
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### pify(input, [options])
|
||||
|
||||
Returns a `Promise` wrapped version of the supplied function or module.
|
||||
|
||||
#### input
|
||||
|
||||
Type: `Function` `Object`
|
||||
|
||||
Callback-style function or module whose methods you want to promisify.
|
||||
|
||||
#### options
|
||||
|
||||
##### multiArgs
|
||||
|
||||
Type: `boolean`<br>
|
||||
Default: `false`
|
||||
|
||||
By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error.
|
||||
|
||||
```js
|
||||
const request = require('request');
|
||||
const pify = require('pify');
|
||||
|
||||
pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => {
|
||||
const [httpResponse, body] = result;
|
||||
});
|
||||
```
|
||||
|
||||
##### include
|
||||
|
||||
Type: `string[]` `RegExp[]`
|
||||
|
||||
Methods in a module to promisify. Remaining methods will be left untouched.
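
A minimal sketch (assuming the standard `fs` module) of a whitelist:

```js
const fs = require('fs');
const pify = require('pify');

// Only `readFile` and `readdir` are promisified; every other method on
// `fs` (including the *Sync variants) is copied over untouched.
const pfs = pify(fs, {include: ['readFile', 'readdir']});

pfs.readFile('package.json', 'utf8').then(data => {
  console.log(JSON.parse(data).name);
  //=> 'pify'
});
```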
|
||||
|
||||
##### exclude
|
||||
|
||||
Type: `string[]` `RegExp[]`<br>
|
||||
Default: `[/.+(Sync|Stream)$/]`
|
||||
|
||||
Methods in a module **not** to promisify. Methods with names ending with `'Sync'` are excluded by default.
|
||||
|
||||
##### excludeMain
|
||||
|
||||
Type: `boolean`<br>
|
||||
Default: `false`
|
||||
|
||||
If given module is a function itself, it will be promisified. Turn this option on if you want to promisify only methods of the module.
|
||||
|
||||
```js
|
||||
const pify = require('pify');
|
||||
|
||||
function fn() {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn.method = (data, callback) => {
|
||||
setImmediate(() => {
|
||||
callback(null, data);
|
||||
});
|
||||
};
|
||||
|
||||
// Promisify methods but not `fn()`
|
||||
const promiseFn = pify(fn, {excludeMain: true});
|
||||
|
||||
if (promiseFn()) {
|
||||
promiseFn.method('hi').then(data => {
|
||||
console.log(data);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
##### errorFirst
|
||||
|
||||
Type: `boolean`<br>
|
||||
Default: `true`
|
||||
|
||||
Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc.
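
A small sketch of the `fs.exists()` case mentioned above:

```js
const fs = require('fs');
const pify = require('pify');

// `fs.exists()` calls back with a single boolean and no error argument,
// so error-first handling is turned off for it.
const pExists = pify(fs.exists, {errorFirst: false});

pExists('package.json').then(exists => {
  console.log(exists);
  //=> true
});
```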
|
||||
|
||||
##### promiseModule
|
||||
|
||||
Type: `Function`
|
||||
|
||||
Custom promise module to use instead of the native one.
|
||||
|
||||
Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill.
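
A rough sketch using `pinkie-promise` (assuming it is installed):

```js
const pify = require('pify');
const PinkiePromise = require('pinkie-promise');

const fn = callback => setImmediate(() => callback(null, 'unicorn'));

// The returned promise is a pinkie-promise instance, not a native Promise.
pify(fn, {promiseModule: PinkiePromise})().then(result => {
  console.log(result);
  //=> 'unicorn'
});
```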
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted
|
||||
- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently
|
||||
- [More…](https://github.com/sindresorhus/promise-fun)
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
59
node_modules/make-dir/package.json
generated
vendored
59
node_modules/make-dir/package.json
generated
vendored
|
@ -1,59 +0,0 @@
|
|||
{
|
||||
"name": "make-dir",
|
||||
"version": "2.1.0",
|
||||
"description": "Make a directory and its parents if needed - Think `mkdir -p`",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/make-dir",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && nyc ava && tsd-check"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"mkdir",
|
||||
"mkdirp",
|
||||
"make",
|
||||
"directories",
|
||||
"dir",
|
||||
"dirs",
|
||||
"folders",
|
||||
"directory",
|
||||
"folder",
|
||||
"path",
|
||||
"parent",
|
||||
"parents",
|
||||
"intermediate",
|
||||
"recursively",
|
||||
"recursive",
|
||||
"create",
|
||||
"fs",
|
||||
"filesystem",
|
||||
"file-system"
|
||||
],
|
||||
"dependencies": {
|
||||
"pify": "^4.0.1",
|
||||
"semver": "^5.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/graceful-fs": "^4.1.3",
|
||||
"@types/node": "^11.10.4",
|
||||
"ava": "^1.2.0",
|
||||
"codecov": "^3.0.0",
|
||||
"graceful-fs": "^4.1.11",
|
||||
"nyc": "^13.1.0",
|
||||
"path-type": "^3.0.0",
|
||||
"tempy": "^0.2.1",
|
||||
"tsd-check": "^0.3.0",
|
||||
"xo": "^0.24.0"
|
||||
}
|
||||
}
|
123
node_modules/make-dir/readme.md
generated
vendored
123
node_modules/make-dir/readme.md
generated
vendored
|
@ -1,123 +0,0 @@
|
|||
# make-dir [![Build Status](https://travis-ci.org/sindresorhus/make-dir.svg?branch=master)](https://travis-ci.org/sindresorhus/make-dir) [![codecov](https://codecov.io/gh/sindresorhus/make-dir/branch/master/graph/badge.svg)](https://codecov.io/gh/sindresorhus/make-dir)
|
||||
|
||||
> Make a directory and its parents if needed - Think `mkdir -p`
|
||||
|
||||
|
||||
## Advantages over [`mkdirp`](https://github.com/substack/node-mkdirp)
|
||||
|
||||
- Promise API *(Async/await ready!)*
|
||||
- Fixes many `mkdirp` issues: [#96](https://github.com/substack/node-mkdirp/pull/96) [#70](https://github.com/substack/node-mkdirp/issues/70) [#66](https://github.com/substack/node-mkdirp/issues/66)
|
||||
- 100% test coverage
|
||||
- CI-tested on macOS, Linux, and Windows
|
||||
- Actively maintained
|
||||
- Doesn't bundle a CLI
|
||||
- Uses native the `fs.mkdir/mkdirSync` [`recursive` option](https://nodejs.org/dist/latest/docs/api/fs.html#fs_fs_mkdir_path_options_callback) in Node.js >=10.12.0 unless [overridden](#fs)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install make-dir
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
$ pwd
|
||||
/Users/sindresorhus/fun
|
||||
$ tree
|
||||
.
|
||||
```
|
||||
|
||||
```js
|
||||
const makeDir = require('make-dir');
|
||||
|
||||
(async () => {
|
||||
const path = await makeDir('unicorn/rainbow/cake');
|
||||
|
||||
console.log(path);
|
||||
//=> '/Users/sindresorhus/fun/unicorn/rainbow/cake'
|
||||
})();
|
||||
```
|
||||
|
||||
```
|
||||
$ tree
|
||||
.
|
||||
└── unicorn
|
||||
└── rainbow
|
||||
└── cake
|
||||
```
|
||||
|
||||
Multiple directories:
|
||||
|
||||
```js
|
||||
const makeDir = require('make-dir');
|
||||
|
||||
(async () => {
|
||||
const paths = await Promise.all([
|
||||
makeDir('unicorn/rainbow'),
|
||||
makeDir('foo/bar')
|
||||
]);
|
||||
|
||||
console.log(paths);
|
||||
/*
|
||||
[
|
||||
'/Users/sindresorhus/fun/unicorn/rainbow',
|
||||
'/Users/sindresorhus/fun/foo/bar'
|
||||
]
|
||||
*/
|
||||
})();
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### makeDir(path, [options])
|
||||
|
||||
Returns a `Promise` for the path to the created directory.
|
||||
|
||||
### makeDir.sync(path, [options])
|
||||
|
||||
Returns the path to the created directory.
|
||||
|
||||
#### path
|
||||
|
||||
Type: `string`
|
||||
|
||||
Directory to create.
|
||||
|
||||
#### options
|
||||
|
||||
Type: `Object`
|
||||
|
||||
##### mode
|
||||
|
||||
Type: `integer`<br>
|
||||
Default: `0o777 & (~process.umask())`
|
||||
|
||||
Directory [permissions](https://x-team.com/blog/file-system-permissions-umask-node-js/).
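
For example, a minimal sketch creating the chain with `0o755` instead of the default:

```js
const makeDir = require('make-dir');

(async () => {
	// Overrides the default of 0o777 & (~process.umask()).
	await makeDir('unicorn/rainbow', {mode: 0o755});
})();
```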
|
||||
|
||||
##### fs
|
||||
|
||||
Type: `Object`<br>
|
||||
Default: `require('fs')`
|
||||
|
||||
Use a custom `fs` implementation. For example [`graceful-fs`](https://github.com/isaacs/node-graceful-fs).
|
||||
|
||||
Using a custom `fs` implementation will block the use of the native `recursive` option if `fs.mkdir` or `fs.mkdirSync` is not the native function.
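
A rough sketch with `graceful-fs` (assuming it is installed), which therefore falls back to make-dir's own recursive logic:

```js
const makeDir = require('make-dir');
const gracefulFs = require('graceful-fs');

(async () => {
	// `graceful-fs` is not the native `fs` module, so the native `recursive`
	// option is skipped and directories are created one level at a time.
	const path = await makeDir('unicorn/rainbow/cake', {fs: gracefulFs});

	console.log(path);
})();
```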
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [make-dir-cli](https://github.com/sindresorhus/make-dir-cli) - CLI for this module
|
||||
- [del](https://github.com/sindresorhus/del) - Delete files and directories
|
||||
- [globby](https://github.com/sindresorhus/globby) - User-friendly glob matching
|
||||
- [cpy](https://github.com/sindresorhus/cpy) - Copy files
|
||||
- [cpy-cli](https://github.com/sindresorhus/cpy-cli) - Copy files on the command-line
|
||||
- [move-file](https://github.com/sindresorhus/move-file) - Move a file
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
0
node_modules/mime/.npmignore
generated
vendored
0
node_modules/mime/.npmignore
generated
vendored
164
node_modules/mime/CHANGELOG.md
generated
vendored
164
node_modules/mime/CHANGELOG.md
generated
vendored
|
@ -1,164 +0,0 @@
|
|||
# Changelog
|
||||
|
||||
## v1.6.0 (24/11/2017)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v2.0.4 (24/11/2017)
|
||||
- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/node-mime/issues/182)
|
||||
- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/node-mime/issues/181)
|
||||
|
||||
---
|
||||
|
||||
## v1.5.0 (22/11/2017)
|
||||
- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/node-mime/issues/179)
|
||||
- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/node-mime/issues/178)
|
||||
- [**closed**] How it works in brownser ? [#176](https://github.com/broofa/node-mime/issues/176)
|
||||
- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/node-mime/issues/175)
|
||||
- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/node-mime/issues/167)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.3 (25/09/2017)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.4.1 (25/09/2017)
|
||||
- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/node-mime/issues/172)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.2 (15/09/2017)
|
||||
- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/node-mime/issues/165)
|
||||
- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/node-mime/issues/164)
|
||||
- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/node-mime/issues/163)
|
||||
- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/node-mime/issues/162)
|
||||
- [**V2**] Allow callers to load module with official, full, or no defined types. [#161](https://github.com/broofa/node-mime/issues/161)
|
||||
- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/node-mime/issues/160)
|
||||
- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/node-mime/issues/152)
|
||||
- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/node-mime/issues/139)
|
||||
- [**V2**] reset mime-types [#124](https://github.com/broofa/node-mime/issues/124)
|
||||
- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/node-mime/issues/113)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.1 (14/09/2017)
|
||||
- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/node-mime/issues/171)
|
||||
- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/node-mime/issues/170)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.0 (12/09/2017)
|
||||
- [**closed**] woff and woff2 [#168](https://github.com/broofa/node-mime/issues/168)
|
||||
|
||||
---
|
||||
|
||||
## v1.4.0 (28/08/2017)
|
||||
- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/node-mime/issues/159)
|
||||
- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/node-mime/issues/158)
|
||||
- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/node-mime/issues/157)
|
||||
- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/node-mime/issues/147)
|
||||
- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/node-mime/issues/135)
|
||||
- [**closed**] requested features [#131](https://github.com/broofa/node-mime/issues/131)
|
||||
- [**closed**] Make types.json loading optional? [#129](https://github.com/broofa/node-mime/issues/129)
|
||||
- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/node-mime/issues/120)
|
||||
- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/node-mime/issues/118)
|
||||
- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/node-mime/issues/108)
|
||||
- [**closed**] don't make default_type global [#78](https://github.com/broofa/node-mime/issues/78)
|
||||
- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/node-mime/issues/74)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.6 (11/05/2017)
|
||||
- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/node-mime/issues/154)
|
||||
- [**closed**] Error while installing mime [#153](https://github.com/broofa/node-mime/issues/153)
|
||||
- [**closed**] application/manifest+json [#149](https://github.com/broofa/node-mime/issues/149)
|
||||
- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/node-mime/issues/141)
|
||||
- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/node-mime/issues/140)
|
||||
- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/node-mime/issues/130)
|
||||
- [**closed**] how to support plist? [#126](https://github.com/broofa/node-mime/issues/126)
|
||||
- [**closed**] how does .types file format look like? [#123](https://github.com/broofa/node-mime/issues/123)
|
||||
- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/node-mime/issues/121)
|
||||
- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/node-mime/issues/117)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.4 (06/02/2015)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.3.3 (06/02/2015)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.3.1 (05/02/2015)
|
||||
- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/node-mime/issues/111)
|
||||
- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/node-mime/issues/110)
|
||||
- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/node-mime/issues/94)
|
||||
- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/node-mime/issues/77)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.0 (05/02/2015)
|
||||
- [**closed**] Add common name? [#114](https://github.com/broofa/node-mime/issues/114)
|
||||
- [**closed**] application/x-yaml [#104](https://github.com/broofa/node-mime/issues/104)
|
||||
- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/node-mime/issues/102)
|
||||
- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/node-mime/issues/99)
|
||||
- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/node-mime/issues/98)
|
||||
- [**closed**] collaborators [#88](https://github.com/broofa/node-mime/issues/88)
|
||||
- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/node-mime/issues/87)
|
||||
- [**closed**] should application/json's charset be utf8? [#86](https://github.com/broofa/node-mime/issues/86)
|
||||
- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/node-mime/issues/81)
|
||||
- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/node-mime/issues/68)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.11 (15/08/2013)
|
||||
- [**closed**] Update mime.types [#65](https://github.com/broofa/node-mime/issues/65)
|
||||
- [**closed**] Publish a new version [#63](https://github.com/broofa/node-mime/issues/63)
|
||||
- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/node-mime/issues/55)
|
||||
- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/node-mime/issues/52)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.10 (25/07/2013)
|
||||
- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/node-mime/issues/62)
|
||||
- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/node-mime/issues/51)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.9 (17/01/2013)
|
||||
- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/node-mime/issues/49)
|
||||
- [**closed**] Please add semicolon [#46](https://github.com/broofa/node-mime/issues/46)
|
||||
- [**closed**] parse full mime types [#43](https://github.com/broofa/node-mime/issues/43)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.8 (10/01/2013)
|
||||
- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/node-mime/issues/47)
|
||||
- [**closed**] Add mime types for lua code. [#45](https://github.com/broofa/node-mime/issues/45)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.7 (19/10/2012)
|
||||
- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/node-mime/issues/41)
|
||||
- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/node-mime/issues/36)
|
||||
- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/node-mime/issues/30)
|
||||
- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/node-mime/issues/27)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.5 (16/02/2012)
|
||||
- [**closed**] When looking up a types, check hasOwnProperty [#23](https://github.com/broofa/node-mime/issues/23)
|
||||
- [**closed**] Bump version to 1.2.2 [#18](https://github.com/broofa/node-mime/issues/18)
|
||||
- [**closed**] No license [#16](https://github.com/broofa/node-mime/issues/16)
|
||||
- [**closed**] Some types missing that are used by html5/css3 [#13](https://github.com/broofa/node-mime/issues/13)
|
||||
- [**closed**] npm install fails for 1.2.1 [#12](https://github.com/broofa/node-mime/issues/12)
|
||||
- [**closed**] image/pjpeg + image/x-png [#10](https://github.com/broofa/node-mime/issues/10)
|
||||
- [**closed**] symlink [#8](https://github.com/broofa/node-mime/issues/8)
|
||||
- [**closed**] gzip [#2](https://github.com/broofa/node-mime/issues/2)
|
||||
- [**closed**] ALL CAPS filenames return incorrect mime type [#1](https://github.com/broofa/node-mime/issues/1)
|
21
node_modules/mime/LICENSE
generated
vendored
21
node_modules/mime/LICENSE
generated
vendored
|
@ -1,21 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2010 Benjamin Thomas, Robert Kieffer
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
90
node_modules/mime/README.md
generated
vendored
90
node_modules/mime/README.md
generated
vendored
|
@ -1,90 +0,0 @@
|
|||
# mime
|
||||
|
||||
Comprehensive MIME type mapping API based on mime-db module.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](http://github.com/isaacs/npm):
|
||||
|
||||
npm install mime
|
||||
|
||||
## Contributing / Testing
|
||||
|
||||
npm run test
|
||||
|
||||
## Command Line
|
||||
|
||||
mime [path_string]
|
||||
|
||||
E.g.
|
||||
|
||||
> mime scripts/jquery.js
|
||||
application/javascript
|
||||
|
||||
## API - Queries
|
||||
|
||||
### mime.lookup(path)
|
||||
Get the mime type associated with a file, if no mime type is found `application/octet-stream` is returned. Performs a case-insensitive lookup using the extension in `path` (the substring after the last '/' or '.'). E.g.
|
||||
|
||||
```js
|
||||
var mime = require('mime');
|
||||
|
||||
mime.lookup('/path/to/file.txt'); // => 'text/plain'
|
||||
mime.lookup('file.txt'); // => 'text/plain'
|
||||
mime.lookup('.TXT'); // => 'text/plain'
|
||||
mime.lookup('htm'); // => 'text/html'
|
||||
```
|
||||
|
||||
### mime.default_type
|
||||
Sets the mime type returned when `mime.lookup` fails to find the extension searched for. (Default is `application/octet-stream`.)
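
A short sketch (the extension here is made up):

```js
var mime = require('mime');

mime.default_type = 'text/plain';
mime.lookup('file.unknownext'); // => 'text/plain'

// A per-call fallback can also be passed as the second argument:
mime.lookup('file.unknownext', 'application/octet-stream'); // => 'application/octet-stream'
```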
|
||||
|
||||
### mime.extension(type)
|
||||
Get the default extension for `type`
|
||||
|
||||
```js
|
||||
mime.extension('text/html'); // => 'html'
|
||||
mime.extension('application/octet-stream'); // => 'bin'
|
||||
```
|
||||
|
||||
### mime.charsets.lookup()
|
||||
|
||||
Map mime-type to charset
|
||||
|
||||
```js
|
||||
mime.charsets.lookup('text/plain'); // => 'UTF-8'
|
||||
```
|
||||
|
||||
(The logic for charset lookups is pretty rudimentary. Feel free to suggest improvements.)
|
||||
|
||||
## API - Defining Custom Types
|
||||
|
||||
Custom type mappings can be added on a per-project basis via the following APIs.
|
||||
|
||||
### mime.define()
|
||||
|
||||
Add custom mime/extension mappings
|
||||
|
||||
```js
|
||||
mime.define({
|
||||
'text/x-some-format': ['x-sf', 'x-sft', 'x-sfml'],
|
||||
'application/x-my-type': ['x-mt', 'x-mtt'],
|
||||
// etc ...
|
||||
});
|
||||
|
||||
mime.lookup('x-sft'); // => 'text/x-some-format'
|
||||
```
|
||||
|
||||
The first entry in the extensions array is returned by `mime.extension()`. E.g.
|
||||
|
||||
```js
|
||||
mime.extension('text/x-some-format'); // => 'x-sf'
|
||||
```
|
||||
|
||||
### mime.load(filepath)
|
||||
|
||||
Load mappings from an Apache ".types" format file
|
||||
|
||||
```js
|
||||
mime.load('./my_project.types');
|
||||
```
|
||||
The .types file format is simple - See the `types` dir for examples.
|
8 node_modules/mime/cli.js generated vendored
@@ -1,8 +0,0 @@
#!/usr/bin/env node

var mime = require('./mime.js');
var file = process.argv[2];
var type = mime.lookup(file);

process.stdout.write(type + '\n');
108
node_modules/mime/mime.js
generated
vendored
108
node_modules/mime/mime.js
generated
vendored
|
@ -1,108 +0,0 @@
|
|||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
|
||||
function Mime() {
|
||||
// Map of extension -> mime type
|
||||
this.types = Object.create(null);
|
||||
|
||||
// Map of mime type -> extension
|
||||
this.extensions = Object.create(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define mimetype -> extension mappings. Each key is a mime-type that maps
|
||||
* to an array of extensions associated with the type. The first extension is
|
||||
* used as the default extension for the type.
|
||||
*
|
||||
 * e.g. mime.define({'audio/ogg': ['oga', 'ogg', 'spx']});
|
||||
*
|
||||
* @param map (Object) type definitions
|
||||
*/
|
||||
Mime.prototype.define = function (map) {
|
||||
for (var type in map) {
|
||||
var exts = map[type];
|
||||
for (var i = 0; i < exts.length; i++) {
|
||||
if (process.env.DEBUG_MIME && this.types[exts[i]]) {
|
||||
console.warn((this._loading || "define()").replace(/.*\//, ''), 'changes "' + exts[i] + '" extension type from ' +
|
||||
this.types[exts[i]] + ' to ' + type);
|
||||
}
|
||||
|
||||
this.types[exts[i]] = type;
|
||||
}
|
||||
|
||||
// Default extension is the first one we encounter
|
||||
if (!this.extensions[type]) {
|
||||
this.extensions[type] = exts[0];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Load an Apache2-style ".types" file
|
||||
*
|
||||
* This may be called multiple times (it's expected). Where files declare
|
||||
* overlapping types/extensions, the last file wins.
|
||||
*
|
||||
* @param file (String) path of file to load.
|
||||
*/
|
||||
Mime.prototype.load = function(file) {
|
||||
this._loading = file;
|
||||
// Read file and split into lines
|
||||
var map = {},
|
||||
content = fs.readFileSync(file, 'ascii'),
|
||||
lines = content.split(/[\r\n]+/);
|
||||
|
||||
lines.forEach(function(line) {
|
||||
// Clean up whitespace/comments, and split into fields
|
||||
var fields = line.replace(/\s*#.*|^\s*|\s*$/g, '').split(/\s+/);
|
||||
map[fields.shift()] = fields;
|
||||
});
|
||||
|
||||
this.define(map);
|
||||
|
||||
this._loading = null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Lookup a mime type based on extension
|
||||
*/
|
||||
Mime.prototype.lookup = function(path, fallback) {
|
||||
var ext = path.replace(/^.*[\.\/\\]/, '').toLowerCase();
|
||||
|
||||
return this.types[ext] || fallback || this.default_type;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return file extension associated with a mime type
|
||||
*/
|
||||
Mime.prototype.extension = function(mimeType) {
|
||||
var type = mimeType.match(/^\s*([^;\s]*)(?:;|\s|$)/)[1].toLowerCase();
|
||||
return this.extensions[type];
|
||||
};
|
||||
|
||||
// Default instance
|
||||
var mime = new Mime();
|
||||
|
||||
// Define built-in types
|
||||
mime.define(require('./types.json'));
|
||||
|
||||
// Default type
|
||||
mime.default_type = mime.lookup('bin');
|
||||
|
||||
//
|
||||
// Additional API specific to the default instance
|
||||
//
|
||||
|
||||
mime.Mime = Mime;
|
||||
|
||||
/**
|
||||
* Lookup a charset based on mime type.
|
||||
*/
|
||||
mime.charsets = {
|
||||
lookup: function(mimeType, fallback) {
|
||||
// Assume text types are utf8
|
||||
return (/^text\/|^application\/(javascript|json)/).test(mimeType) ? 'UTF-8' : fallback;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = mime;
|
44
node_modules/mime/package.json
generated
vendored
44
node_modules/mime/package.json
generated
vendored
|
@ -1,44 +0,0 @@
|
|||
{
|
||||
"author": {
|
||||
"name": "Robert Kieffer",
|
||||
"url": "http://github.com/broofa",
|
||||
"email": "robert@broofa.com"
|
||||
},
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Benjamin Thomas",
|
||||
"url": "http://github.com/bentomas",
|
||||
"email": "benjamin@benjaminthomas.org"
|
||||
}
|
||||
],
|
||||
"description": "A comprehensive library for mime-type mapping",
|
||||
"license": "MIT",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"github-release-notes": "0.13.1",
|
||||
"mime-db": "1.31.0",
|
||||
"mime-score": "1.1.0"
|
||||
},
|
||||
"scripts": {
|
||||
"prepare": "node src/build.js",
|
||||
"changelog": "gren changelog --tags=all --generate --override",
|
||||
"test": "node src/test.js"
|
||||
},
|
||||
"keywords": [
|
||||
"util",
|
||||
"mime"
|
||||
],
|
||||
"main": "mime.js",
|
||||
"name": "mime",
|
||||
"repository": {
|
||||
"url": "https://github.com/broofa/node-mime",
|
||||
"type": "git"
|
||||
},
|
||||
"version": "1.6.0"
|
||||
}
|
53
node_modules/mime/src/build.js
generated
vendored
53
node_modules/mime/src/build.js
generated
vendored
|
@ -1,53 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const mimeScore = require('mime-score');
|
||||
|
||||
let db = require('mime-db');
|
||||
let chalk = require('chalk');
|
||||
|
||||
const STANDARD_FACET_SCORE = 900;
|
||||
|
||||
const byExtension = {};
|
||||
|
||||
// Clear out any conflict extensions in mime-db
|
||||
for (let type in db) {
|
||||
let entry = db[type];
|
||||
entry.type = type;
|
||||
|
||||
if (!entry.extensions) continue;
|
||||
|
||||
entry.extensions.forEach(ext => {
|
||||
if (ext in byExtension) {
|
||||
const e0 = entry;
|
||||
const e1 = byExtension[ext];
|
||||
e0.pri = mimeScore(e0.type, e0.source);
|
||||
e1.pri = mimeScore(e1.type, e1.source);
|
||||
|
||||
let drop = e0.pri < e1.pri ? e0 : e1;
|
||||
let keep = e0.pri >= e1.pri ? e0 : e1;
|
||||
drop.extensions = drop.extensions.filter(e => e !== ext);
|
||||
|
||||
console.log(`${ext}: Keeping ${chalk.green(keep.type)} (${keep.pri}), dropping ${chalk.red(drop.type)} (${drop.pri})`);
|
||||
}
|
||||
byExtension[ext] = entry;
|
||||
});
|
||||
}
|
||||
|
||||
function writeTypesFile(types, path) {
|
||||
fs.writeFileSync(path, JSON.stringify(types));
|
||||
}
|
||||
|
||||
// Segregate into standard and non-standard types based on facet per
|
||||
// https://tools.ietf.org/html/rfc6838#section-3.1
|
||||
const types = {};
|
||||
|
||||
Object.keys(db).sort().forEach(k => {
|
||||
const entry = db[k];
|
||||
types[entry.type] = entry.extensions;
|
||||
});
|
||||
|
||||
writeTypesFile(types, path.join(__dirname, '..', 'types.json'));
|
60
node_modules/mime/src/test.js
generated
vendored
60
node_modules/mime/src/test.js
generated
vendored
|
@ -1,60 +0,0 @@
|
|||
/**
|
||||
* Usage: node test.js
|
||||
*/
|
||||
|
||||
var mime = require('../mime');
|
||||
var assert = require('assert');
|
||||
var path = require('path');
|
||||
|
||||
//
|
||||
// Test mime lookups
|
||||
//
|
||||
|
||||
assert.equal('text/plain', mime.lookup('text.txt')); // normal file
|
||||
assert.equal('text/plain', mime.lookup('TEXT.TXT')); // uppercase
|
||||
assert.equal('text/plain', mime.lookup('dir/text.txt')); // dir + file
|
||||
assert.equal('text/plain', mime.lookup('.text.txt')); // hidden file
|
||||
assert.equal('text/plain', mime.lookup('.txt')); // nameless
|
||||
assert.equal('text/plain', mime.lookup('txt')); // extension-only
|
||||
assert.equal('text/plain', mime.lookup('/txt')); // extension-less ()
|
||||
assert.equal('text/plain', mime.lookup('\\txt')); // Windows, extension-less
|
||||
assert.equal('application/octet-stream', mime.lookup('text.nope')); // unrecognized
|
||||
assert.equal('fallback', mime.lookup('text.fallback', 'fallback')); // alternate default
|
||||
|
||||
//
|
||||
// Test extensions
|
||||
//
|
||||
|
||||
assert.equal('txt', mime.extension(mime.types.text));
|
||||
assert.equal('html', mime.extension(mime.types.htm));
|
||||
assert.equal('bin', mime.extension('application/octet-stream'));
|
||||
assert.equal('bin', mime.extension('application/octet-stream '));
|
||||
assert.equal('html', mime.extension(' text/html; charset=UTF-8'));
|
||||
assert.equal('html', mime.extension('text/html; charset=UTF-8 '));
|
||||
assert.equal('html', mime.extension('text/html; charset=UTF-8'));
|
||||
assert.equal('html', mime.extension('text/html ; charset=UTF-8'));
|
||||
assert.equal('html', mime.extension('text/html;charset=UTF-8'));
|
||||
assert.equal('html', mime.extension('text/Html;charset=UTF-8'));
|
||||
assert.equal(undefined, mime.extension('unrecognized'));
|
||||
|
||||
//
|
||||
// Test node.types lookups
|
||||
//
|
||||
|
||||
assert.equal('font/woff', mime.lookup('file.woff'));
|
||||
assert.equal('application/octet-stream', mime.lookup('file.buffer'));
|
||||
// TODO: Uncomment once #157 is resolved
|
||||
// assert.equal('audio/mp4', mime.lookup('file.m4a'));
|
||||
assert.equal('font/otf', mime.lookup('file.otf'));
|
||||
|
||||
//
|
||||
// Test charsets
|
||||
//
|
||||
|
||||
assert.equal('UTF-8', mime.charsets.lookup('text/plain'));
|
||||
assert.equal('UTF-8', mime.charsets.lookup(mime.types.js));
|
||||
assert.equal('UTF-8', mime.charsets.lookup(mime.types.json));
|
||||
assert.equal(undefined, mime.charsets.lookup(mime.types.bin));
|
||||
assert.equal('fallback', mime.charsets.lookup('application/octet-stream', 'fallback'));
|
||||
|
||||
console.log('\nAll tests passed');
|
1 node_modules/mime/types.json generated vendored
File diff suppressed because one or more lines are too long
27 node_modules/needle/.github/workflows/nodejs.yml generated vendored
@@ -1,27 +0,0 @@
name: Node CI
on: [push]

jobs:
  build:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [4.x, 6.x, 8.x, 10.x, 12.x]

    steps:
    - uses: actions/checkout@v1
    - name: Use Node.js ${{ matrix.node-version }}
      uses: actions/setup-node@v1
      with:
        node-version: ${{ matrix.node-version }}
    - name: npm install, build, and test
      run: |
        npm install
        mkdir -p test/keys
        openssl genrsa -out test/keys/ssl.key 2048
        openssl req -new -key test/keys/ssl.key -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -x509 -days 999 -out test/keys/ssl.cert
        npm run build --if-present
        npm test
      env:
        CI: true
612
node_modules/needle/README.md
generated
vendored
612
node_modules/needle/README.md
generated
vendored
|
@ -1,612 +0,0 @@
|
|||
Needle
|
||||
======
|
||||
|
||||
[![NPM](https://nodei.co/npm/needle.png)](https://nodei.co/npm/needle/)
|
||||
|
||||
The leanest and most handsome HTTP client in the Nodelands.
|
||||
|
||||
```js
|
||||
var needle = require('needle');
|
||||
|
||||
needle.get('http://www.google.com', function(error, response) {
|
||||
if (!error && response.statusCode == 200)
|
||||
console.log(response.body);
|
||||
});
|
||||
```
|
||||
|
||||
Callbacks not floating your boat? Needle got your back.
|
||||
|
||||
``` js
|
||||
var data = {
|
||||
file: '/home/johnlennon/walrus.png',
|
||||
content_type: 'image/png'
|
||||
};
|
||||
|
||||
// the callback is optional, and needle returns a `readableStream` object
|
||||
// that triggers a 'done' event when the request/response process is complete.
|
||||
needle
|
||||
.post('https://my.server.com/foo', data, { multipart: true })
|
||||
.on('readable', function() { /* eat your chunks */ })
|
||||
.on('done', function(err) {
|
||||
console.log('Ready-o!');
|
||||
})
|
||||
```
|
||||
|
||||
From version 2.0.x up, Promises are also supported. Just call `needle()` directly and you'll get a native Promise object.
|
||||
|
||||
```js
|
||||
needle('put', 'https://hacking.the.gibson/login', { password: 'god' }, { json: true })
|
||||
.then(function(response) {
|
||||
return doSomethingWith(response)
|
||||
})
|
||||
.catch(function(err) {
|
||||
console.log('Call the locksmith!')
|
||||
})
|
||||
```
|
||||
|
||||
With only two real dependencies, Needle supports:
|
||||
|
||||
- HTTP/HTTPS requests, with the usual verbs you would expect
|
||||
- All of Node's native TLS options, such as 'rejectUnauthorized' (see below)
|
||||
- Basic & Digest authentication with auto-detection
|
||||
- Multipart form-data (e.g. file uploads)
|
||||
- HTTP Proxy forwarding, optionally with authentication
|
||||
- Streaming gzip, deflate, and brotli decompression
|
||||
- Automatic XML & JSON parsing
|
||||
- 301/302/303 redirect following, with fine-grained tuning, and
|
||||
- Streaming non-UTF-8 charset decoding, via `iconv-lite`
|
||||
|
||||
And yes, Mr. Wayne, it does come in black.
|
||||
|
||||
This makes Needle an ideal alternative for performing quick HTTP requests in Node, either for API interaction, downloading or uploading streams of data, and so on. If you need OAuth, AWS support or anything fancier, you should check out mikeal's request module.
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
```
|
||||
$ npm install needle
|
||||
```
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
```js
|
||||
// using promises
|
||||
needle('get', 'https://server.com/posts/123')
|
||||
.then(function(resp) {
|
||||
// ...
|
||||
})
|
||||
.catch(function(err) {
|
||||
// ...
|
||||
});
|
||||
|
||||
// with callback
|
||||
needle.get('ifconfig.me/all.json', function(error, response, body) {
|
||||
if (error) throw error;
|
||||
|
||||
// body is an alias for `response.body`,
|
||||
// that in this case holds a JSON-decoded object.
|
||||
console.log(body.ip_addr);
|
||||
});
|
||||
|
||||
// no callback, using streams
|
||||
needle.get('https://google.com/images/logo.png')
|
||||
.pipe(fs.createWriteStream('logo.png'))
|
||||
.on('done', function(err) {
|
||||
console.log('Pipe finished!');
|
||||
});
|
||||
```
|
||||
|
||||
As you can see, you can use Needle with Promises or without them. When using Promises or when a callback is passed, the response's body will be buffered and written to `response.body`, and the callback will be fired when all of the data has been collected and processed (e.g. decompressed, decoded and/or parsed).
|
||||
|
||||
When no callback is passed, however, the buffering logic will be skipped but the response stream will still go through Needle's processing pipeline, so you get all the benefits of post-processing while keeping the streamishness we all love from Node.
|
||||
|
||||
Response pipeline
|
||||
-----------------
|
||||
|
||||
Depending on the response's Content-Type, Needle will either attempt to parse JSON or XML streams, or, if a text response was received, will ensure that the final encoding you get is UTF-8.
|
||||
|
||||
You can also request a gzip/deflated/brotli response, which, if sent by the server, will be processed before parsing or decoding is performed. (Note: brotli is only supported on Node 10.16.0 or above, and will not be requested or processed on earlier versions.)
|
||||
|
||||
```js
|
||||
needle.get('http://stackoverflow.com/feeds', { compressed: true }, function(err, resp) {
|
||||
console.log(resp.body); // this little guy won't be a Gzipped binary blob
|
||||
// but a nice object containing all the latest entries
|
||||
});
|
||||
```
|
||||
|
||||
Or in anti-callback mode, using a few other options:
|
||||
|
||||
```js
|
||||
var options = {
|
||||
compressed : true, // sets 'Accept-Encoding' to 'gzip, deflate, br'
|
||||
follow_max : 5, // follow up to five redirects
|
||||
rejectUnauthorized : true // verify SSL certificate
|
||||
}
|
||||
|
||||
var stream = needle.get('https://backend.server.com/everything.html', options);
|
||||
|
||||
// read the chunks from the 'readable' event, so the stream gets consumed.
|
||||
stream.on('readable', function() {
|
||||
while (data = this.read()) {
|
||||
console.log(data.toString());
|
||||
}
|
||||
})
|
||||
|
||||
stream.on('done', function(err) {
|
||||
// if our request had an error, our 'done' event will tell us.
|
||||
if (!err) console.log('Great success!');
|
||||
})
|
||||
```
|
||||
|
||||
API
|
||||
---
|
||||
|
||||
### needle(method, url[, data][, options][, callback]) `(> 2.0.x)`
|
||||
|
||||
Calling `needle()` directly returns a Promise. Besides `method` and `url`, all parameters are optional, although when sending a `post`, `put` or `patch` request you will get an error if `data` is not present.
|
||||
|
||||
```js
|
||||
needle('get', 'http://some.url.com')
|
||||
.then(function(resp) { console.log(resp.body) })
|
||||
.catch(function(err) { console.error(err) })
|
||||
```
|
||||
|
||||
Aside from the above, all of Needle's request methods return a Readable stream, and both `options` and `callback` are optional. If passed, the callback will receive three arguments: `error`, `response` and `body`, which is basically an alias for `response.body`.
|
||||
|
||||
### needle.head(url[, options][, callback])
|
||||
|
||||
```js
|
||||
needle.head('https://my.backend.server.com', {
|
||||
open_timeout: 5000 // if we're not able to open a connection in 5 seconds, boom.
|
||||
}, function(err, resp) {
|
||||
if (err)
|
||||
console.log('Shoot! Something is wrong: ' + err.message)
|
||||
else
|
||||
console.log('Yup, still alive.')
|
||||
})
|
||||
```
|
||||
|
||||
### needle.get(url[, options][, callback])
|
||||
|
||||
```js
|
||||
needle.get('google.com/search?q=syd+barrett', function(err, resp) {
|
||||
// if no http:// is found, Needle will automagically prepend it.
|
||||
});
|
||||
```
|
||||
|
||||
### needle.post(url, data[, options][, callback])
|
||||
|
||||
```js
|
||||
var options = {
|
||||
headers: { 'X-Custom-Header': 'Bumbaway atuna' }
|
||||
}
|
||||
|
||||
needle.post('https://my.app.com/endpoint', 'foo=bar', options, function(err, resp) {
|
||||
// you can pass params as a string or as an object.
|
||||
});
|
||||
```
|
||||
|
||||
### needle.put(url, data[, options][, callback])
|
||||
|
||||
```js
|
||||
var nested = {
|
||||
params: {
|
||||
are: {
|
||||
also: 'supported'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
needle.put('https://api.app.com/v2', nested, function(err, resp) {
|
||||
console.log('Got ' + resp.bytes + ' bytes.') // another nice treat from this handsome fella.
|
||||
});
|
||||
```
|
||||
|
||||
### needle.patch(url, data[, options][, callback])
|
||||
|
||||
Same behaviour as PUT.
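
As a quick sketch (the endpoint below is made up), a PATCH call takes the same arguments as `needle.put()`:

```js
var needle = require('needle');

// hypothetical endpoint; patch() mirrors put()
needle.patch('https://api.app.com/v2/users/123', { name: 'Syd' }, { json: true }, function(err, resp) {
  if (err) return console.error(err);
  console.log('Got status ' + resp.statusCode);
});
```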
|
||||
|
||||
### needle.delete(url, data[, options][, callback])
|
||||
|
||||
```js
|
||||
var options = {
|
||||
username: 'fidelio',
|
||||
password: 'x'
|
||||
}
|
||||
|
||||
needle.delete('https://api.app.com/messages/123', null, options, function(err, resp) {
|
||||
// in this case, data may be null, but you need to explicitly pass it.
|
||||
});
|
||||
```
|
||||
|
||||
### needle.request(method, url, data[, options][, callback])
|
||||
|
||||
Generic request. This not only allows for flexibility, but also lets you perform a GET request with data, in which case it will be appended to the request as a query string, unless you pass a `json: true` option (read below).
|
||||
|
||||
```js
|
||||
var params = {
|
||||
q : 'a very smart query',
|
||||
page : 2
|
||||
}
|
||||
|
||||
needle.request('get', 'forum.com/search', params, function(err, resp) {
|
||||
if (!err && resp.statusCode == 200)
|
||||
console.log(resp.body); // here you go, mister.
|
||||
});
|
||||
```
|
||||
|
||||
Now, if you pass `json: true` among the options, Needle won't set your params as a querystring but instead send a JSON representation of your data through the request's body, as well as set the `Content-Type` and `Accept` headers to `application/json`.
|
||||
|
||||
```js
|
||||
needle.request('get', 'forum.com/search', params, { json: true }, function(err, resp) {
|
||||
if (resp.statusCode == 200) console.log('It worked!');
|
||||
});
|
||||
```
|
||||
|
||||
Events
|
||||
------
|
||||
|
||||
The [Readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) object returned by the above request methods emits the following events, in addition to the regular ones (e.g. `end`, `close`, `data`, `pipe`, `readable`).
|
||||
|
||||
### Event: `'response'`
|
||||
|
||||
- `response <http.IncomingMessage>`
|
||||
|
||||
Emitted when the underlying [http.ClientRequest](https://nodejs.org/api/http.html#http_class_http_clientrequest) emits a response event. This is after the connection is established and the headers are received, but before any of it is processed (e.g. authorization required or redirect to be followed). No data has been consumed at this point.
|
||||
|
||||
### Event: `'redirect'`
|
||||
|
||||
- `location <String>`
|
||||
|
||||
Indicates that a redirect is being followed. This means that the response code was a redirect (`301`, `302`, `303`, `307`) and the given [redirect options](#redirect-options) allowed following the URL received in the `Location` header.
|
||||
|
||||
### Event: `'header'`
|
||||
|
||||
- `statusCode <Integer>`
|
||||
- `headers <Object>`
|
||||
|
||||
Triggered after the header has been processed, and just before the data is to be consumed. This implies that no redirect is being followed and no further authentication round trip is needed. In other words, we got a "valid" response.
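
As a rough sketch, here's how you might listen for these early events on a streamed request (the URL is only a placeholder):

```js
var needle = require('needle');

var stream = needle.get('https://example.com/', { follow_max: 3 });

stream.on('response', function(res) {
  // raw http.IncomingMessage, before any processing happens
  console.log('Raw status: ' + res.statusCode);
});

stream.on('redirect', function(location) {
  console.log('Following redirect to ' + location);
});

stream.on('header', function(statusCode, headers) {
  // a "valid" response: no redirect or auth retry pending
  console.log('Got ' + statusCode + ' with headers', headers);
});
```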
|
||||
|
||||
### Event: `'done'` (previously 'end')
|
||||
|
||||
- `exception <Error>` (optional)
|
||||
|
||||
Emitted when the request/response process has finished, either because all data was consumed or an error occurred somewhere in between. Unlike a regular stream's `end` event, Needle's `done` will be fired either on success or on failure, which is why the first argument may be an Error object. In other words:
|
||||
|
||||
```js
|
||||
var resp = needle.get('something.worthy/of/being/streamed/by/needle');
|
||||
resp.pipe(someWritableStream);
|
||||
|
||||
resp.on('done', function(err) {
|
||||
if (err) console.log('An error occurred: ' + err.message);
|
||||
else console.log('Great success!');
|
||||
})
|
||||
```
|
||||
|
||||
### Event: `'err'`
|
||||
|
||||
- `exception <Error>`
|
||||
|
||||
Emitted when an error occurs. This should only happen once in the lifecycle of a Needle request.
|
||||
|
||||
### Event: `'timeout'`
|
||||
|
||||
- `type <String>`
|
||||
|
||||
Emitted when a timeout error occurs. Type can be either 'open', 'response', or 'read'. This is emitted right before aborting the request, which will also trigger an `err` event, as described above, with an `ECONNRESET` (socket hang up) exception.
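
A minimal sketch of handling it, assuming a server slow enough to trip the read timeout (the URL is made up):

```js
var needle = require('needle');

var stream = needle.get('https://slow.example.com/big-file', { read_timeout: 2000 });

stream.on('timeout', function(type) {
  // type is 'open', 'response' or 'read'
  console.log('Timed out while waiting (' + type + '), aborting.');
});

stream.on('err', function(err) {
  // the aborted request also triggers 'err' with an ECONNRESET
  console.log('Error: ' + err.message);
});
```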
|
||||
|
||||
Request options
|
||||
---------------
|
||||
|
||||
For information about options that have changed, there's always [the changelog](https://github.com/tomas/needle/releases).
|
||||
|
||||
- `agent` : Uses an [http.Agent](https://nodejs.org/api/http.html#http_class_http_agent) of your choice, instead of the global, default one. Useful for tweaking the behaviour at the connection level, such as when doing tunneling (see below for an example).
|
||||
- `json` : When `true`, sets content type to `application/json` and sends request body as JSON string, instead of a query string.
|
||||
- `open_timeout`: (or `timeout`) Returns an error if the connection takes longer than X milliseconds to establish. Defaults to `10000` (10 secs). `0` means no timeout.
|
||||
- `response_timeout`: Returns an error if no response headers are received within X milliseconds, counting from when the connection is opened. Defaults to `0` (no response timeout).
|
||||
- `read_timeout`: Returns an error if the data transfer takes longer than X milliseconds, once response headers are received. Defaults to `0` (no timeout).
|
||||
- `follow_max` : (or `follow`) Number of redirects to follow. Defaults to `0`. See below for more redirect options.
|
||||
- `multipart` : Enables multipart/form-data encoding. Defaults to `false`. Use it when uploading files.
|
||||
- `proxy` : Forwards request through HTTP(s) proxy. Eg. `proxy: 'http://user:pass@proxy.server.com:3128'`. For more advanced proxying/tunneling use a custom `agent`, as described below.
|
||||
- `headers` : Object containing custom HTTP headers for request. Overrides defaults described below.
|
||||
- `auth` : Determines what to do with provided username/password. Options are `auto`, `digest` or `basic` (default). `auto` will detect the type of authentication depending on the response headers.
|
||||
- `stream_length`: When sending streams, this lets you manually set the Content-Length header, if the stream's byte count is known beforehand, preventing ECONNRESET (socket hang up) errors on some servers that misbehave when receiving payloads of unknown size. Set it to `0` and Needle will get and set the stream's length for you, or leave it unset for the default behaviour, which is no Content-Length header for stream payloads.
|
||||
- `localAddress`: IP address (string). Passed to the http/https request. Local interface from which the request should be emitted.
|
||||
- `uri_modifier`: A function that receives the request URI (or the redirect location, when following redirects) and returns a modified version of it. It must return a valid URI string for the request to succeed (see the sketch after this list).
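
To illustrate a few of these together, here's a sketch of a request using timeouts, a proxy and a `uri_modifier`; the proxy address and rewrite logic are placeholders:

```js
var needle = require('needle');

var options = {
  open_timeout : 5000,   // give up if the connection takes over 5 secs to open
  read_timeout : 10000,  // or if the transfer stalls for over 10 secs
  proxy        : 'http://user:pass@proxy.server.com:3128',
  uri_modifier : function(uri) {
    // e.g. force HTTPS on the original request and on followed redirects
    return uri.replace(/^http:/, 'https:');
  }
}

needle.get('api.server.com/stats', options, function(err, resp, body) {
  // ...
});
```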
|
||||
|
||||
Response options
|
||||
----------------
|
||||
|
||||
- `decode_response` : (or `decode`) Whether to decode the text responses to UTF-8, if Content-Type header shows a different charset. Defaults to `true`.
|
||||
- `parse_response` : (or `parse`) Whether to parse XML or JSON response bodies automagically. Defaults to `true`. You can also set this to 'xml' or 'json' in which case Needle will *only* parse the response if the content type matches.
|
||||
- `output` : Dumps the response body to a file. This occurs after parsing and charset decoding are done.
|
||||
- `parse_cookies` : Whether to parse the response's `Set-Cookie` header. Defaults to `true`. If parsed, response cookies will be available at `resp.cookies` (see the sketch below).
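
Put together, that looks something like this sketch (the URL and paths are only illustrative):

```js
var needle = require('needle');

var options = {
  parse_response  : 'json',              // only parse the body if it's JSON
  decode_response : true,
  output          : '/tmp/response.json' // also dump the response to a file
}

needle.get('https://api.example.com/feed', options, function(err, resp, body) {
  if (err) return console.error(err);
  console.log(resp.cookies); // parsed Set-Cookie values, if the server sent any
  console.log(body);         // a parsed object when the response was JSON
});
```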
|
||||
|
||||
HTTP Header options
|
||||
-------------------
|
||||
|
||||
These are basically shortcuts to the `headers` option described above. A combined sketch follows the list.
|
||||
|
||||
- `cookies` : Builds and sets a Cookie header from a `{ key: 'value' }` object.
|
||||
- `compressed`: If `true`, sets the 'Accept-Encoding' header to 'gzip, deflate, br' (or 'gzip, deflate' when brotli isn't available), and inflates the content if compressed. Defaults to `false`.
|
||||
- `username` : For HTTP basic auth.
|
||||
- `password` : For HTTP basic auth. Requires username to be passed, but is optional.
|
||||
- `accept` : Sets 'Accept' HTTP header. Defaults to `*/*`.
|
||||
- `connection`: Sets 'Connection' HTTP header. Not set by default, unless running Node < 0.11.4 in which case it defaults to `close`. More info about this below.
|
||||
- `user_agent`: Sets the 'User-Agent' HTTP header. Defaults to `Needle/{version} (Node.js {node_version})`.
|
||||
- `content_type`: Sets the 'Content-Type' header. Unset by default, unless you're sending data in which case it's set accordingly to whatever is being sent (`application/x-www-form-urlencoded`, `application/json` or `multipart/form-data`). That is, of course, unless the option is passed, either here or through `options.headers`. You're the boss.
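
For instance, a sketch combining several of these shortcuts (all values are placeholders):

```js
var needle = require('needle');

var options = {
  cookies    : { session: 'abc123' },  // built into a Cookie header
  compressed : true,
  username   : 'user',
  password   : 'secret',
  accept     : 'application/json',
  user_agent : 'MyApp/1.0.0'
}

needle.get('https://api.example.com/me', options, function(err, resp, body) {
  // ...
});
```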
|
||||
|
||||
Node.js TLS Options
|
||||
-------------------
|
||||
|
||||
These options are passed directly to `https.request` if present (a short sketch follows the list). Taken from the [original documentation](http://nodejs.org/docs/latest/api/https.html):
|
||||
|
||||
- `pfx` : Certificate, Private key and CA certificates to use for SSL.
|
||||
- `key` : Private key to use for SSL.
|
||||
- `passphrase` : A string of passphrase for the private key or pfx.
|
||||
- `cert` : Public x509 certificate to use.
|
||||
- `ca` : An authority certificate or array of authority certificates to check the remote host against.
|
||||
- `ciphers` : A string describing the ciphers to use or exclude.
|
||||
- `rejectUnauthorized` : If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent.
|
||||
- `secureProtocol` : The SSL method to use, e.g. SSLv3_method to force SSL version 3.
|
||||
- `family` : IP address family to use when resolving host and hostname. Valid values are 4 or 6. When unspecified, both IP v4 and v6 will be used.
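
As a sketch, a request using a client certificate and a custom CA could look like this (the file paths are placeholders):

```js
var fs     = require('fs'),
    needle = require('needle');

var options = {
  cert               : fs.readFileSync('/path/to/client.cert'),
  key                : fs.readFileSync('/path/to/client.key'),
  ca                 : fs.readFileSync('/path/to/ca.cert'),
  rejectUnauthorized : true // fail if the server cert can't be verified
}

needle.get('https://secure.internal.server', options, function(err, resp) {
  // ...
});
```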
|
||||
|
||||
Redirect options
|
||||
----------------
|
||||
|
||||
These options only apply if the `follow_max` (or `follow`) option is higher than 0. A combined example follows the list.
|
||||
|
||||
- `follow_set_cookies` : Sends the cookies received in the `set-cookie` header as part of the following request. `false` by default.
|
||||
- `follow_set_referer` : Sets the 'Referer' header to the requested URI when following a redirect. `false` by default.
|
||||
- `follow_keep_method` : If enabled, resends the request using the original verb instead of being rewritten to `get` with no data. `false` by default.
|
||||
- `follow_if_same_host` : When true, Needle will only follow redirects that point to the same host as the original request. `false` by default.
|
||||
- `follow_if_same_protocol` : When true, Needle will only follow redirects that point to the same protocol as the original request. `false` by default.
|
||||
- `follow_if_same_location` : Unless true, Needle will not follow redirects that point to the same location (as set in the response header) as the original request URL. `false` by default.
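
For example, a sketch that follows up to five redirects while keeping the original verb and cookies (the endpoint is made up):

```js
var needle = require('needle');

var options = {
  follow_max          : 5,
  follow_keep_method  : true, // keep POSTing instead of switching to GET
  follow_set_cookies  : true, // resend cookies received via Set-Cookie
  follow_if_same_host : true  // but never leave the original host
}

needle.post('https://app.example.com/login', { user: 'bob' }, options, function(err, resp) {
  // ...
});
```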
|
||||
|
||||
Overriding Defaults
|
||||
-------------------
|
||||
|
||||
Yes sir, we have it. Needle includes a `defaults()` method that lets you override some of the defaults for all future requests. Like this:
|
||||
|
||||
```js
|
||||
needle.defaults({
|
||||
open_timeout: 60000,
|
||||
user_agent: 'MyApp/1.2.3',
|
||||
parse_response: false });
|
||||
```
|
||||
|
||||
This will override Needle's default user agent and 10-second timeout, and disable response parsing, so you don't need to pass those options in every other request.
|
||||
|
||||
More advanced Proxy support
|
||||
---------------------------
|
||||
|
||||
Since you can pass a custom HTTP agent to Needle, you can do all sorts of neat stuff. For example, if you want to use the [`tunnel`](https://github.com/koichik/node-tunnel) module for HTTPS proxying, you can do this:
|
||||
|
||||
```js
|
||||
var tunnel = require('tunnel');
|
||||
var myAgent = tunnel.httpOverHttp({
|
||||
proxy: { host: 'localhost' }
|
||||
});
|
||||
|
||||
needle.get('foobar.com', { agent: myAgent });
|
||||
```
|
||||
|
||||
Otherwise, you can use the [`hpagent`](https://github.com/delvedor/hpagent) package, which keeps the internal sockets alive to be reused.
|
||||
|
||||
```js
|
||||
const { HttpsProxyAgent } = require('hpagent');
|
||||
needle('get', 'https://localhost:9200', {
|
||||
agent: new HttpsProxyAgent({
|
||||
keepAlive: true,
|
||||
keepAliveMsecs: 1000,
|
||||
maxSockets: 256,
|
||||
maxFreeSockets: 256,
|
||||
scheduling: 'lifo',
|
||||
proxy: 'https://localhost:8080'
|
||||
})
|
||||
});
|
||||
```
|
||||
|
||||
Regarding the 'Connection' header
|
||||
---------------------------------
|
||||
|
||||
Unless you're running an old version of Node (< 0.11.4), by default Needle won't set the Connection header on requests, yielding Node's default behaviour of keeping the connection alive with the target server. This immensely speeds up the process of sending several requests to the same host.
|
||||
|
||||
On older versions, however, this has the unwanted behaviour of preventing the runtime from exiting, because of a bug or 'feature' that was changed in 0.11.4. To overcome this, Needle sets the 'Connection' header to 'close' on those versions; however, this also means that new requests to the same host don't benefit from Keep-Alive.
|
||||
|
||||
So if you're stuck on 0.10 or even lower and want full speed, you can simply set the Connection header to 'Keep-Alive' by using `{ connection: 'Keep-Alive' }`. Please note, though, that the kept-alive connection will prevent the runtime from exiting, so you'll need to manually call `process.exit()` or the universe will collapse.
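
So, as a sketch on one of those older runtimes:

```js
var needle = require('needle');

// on Node < 0.11.4, force keep-alive and exit manually when done.
needle.get('http://intranet.server/ping', { connection: 'Keep-Alive' }, function(err, resp) {
  console.log(err ? err.message : 'Got ' + resp.statusCode);
  process.exit(0); // the kept-alive socket would otherwise keep us running
});
```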
|
||||
|
||||
Examples Galore
|
||||
---------------
|
||||
|
||||
### HTTPS GET with Basic Auth
|
||||
|
||||
```js
|
||||
needle.get('https://api.server.com', { username: 'you', password: 'secret' },
|
||||
function(err, resp) {
|
||||
// used HTTP auth
|
||||
});
|
||||
```
|
||||
|
||||
Or use [RFC-1738](http://tools.ietf.org/html/rfc1738#section-3.1) basic auth URL syntax:
|
||||
|
||||
```js
|
||||
needle.get('https://username:password@api.server.com', function(err, resp) {
|
||||
// used HTTP auth from URL
|
||||
});
|
||||
```
|
||||
|
||||
### Digest Auth
|
||||
|
||||
```js
|
||||
needle.get('other.server.com', { username: 'you', password: 'secret', auth: 'digest' },
|
||||
function(err, resp, body) {
|
||||
// needle prepends 'http://' to your URL, if missing
|
||||
});
|
||||
```
|
||||
|
||||
### Custom Accept header, deflate
|
||||
|
||||
```js
|
||||
var options = {
|
||||
compressed : true,
|
||||
follow : 10,
|
||||
accept : 'application/vnd.github.full+json'
|
||||
}
|
||||
|
||||
needle.get('api.github.com/users/tomas', options, function(err, resp, body) {
|
||||
// body will contain a JSON.parse(d) object
|
||||
// if parsing fails, you'll simply get the original body
|
||||
});
|
||||
```
|
||||
|
||||
### GET XML object
|
||||
|
||||
```js
|
||||
needle.get('https://news.ycombinator.com/rss', function(err, resp, body) {
|
||||
// you'll get a nice object containing the nodes in the RSS
|
||||
});
|
||||
```
|
||||
|
||||
### GET binary, output to file
|
||||
|
||||
```js
|
||||
needle.get('http://upload.server.com/tux.png', { output: '/tmp/tux.png' }, function(err, resp, body) {
|
||||
// you can dump any response to a file, not only binaries.
|
||||
});
|
||||
```
|
||||
|
||||
### GET through proxy
|
||||
|
||||
```js
|
||||
needle.get('http://search.npmjs.org', { proxy: 'http://localhost:1234' }, function(err, resp, body) {
|
||||
// request passed through proxy
|
||||
});
|
||||
```
|
||||
|
||||
### GET a very large document in a stream (from 0.7+)
|
||||
|
||||
```js
|
||||
var stream = needle.get('http://www.as35662.net/100.log');
|
||||
|
||||
stream.on('readable', function() {
|
||||
var chunk;
|
||||
while (chunk = this.read()) {
|
||||
console.log('got data: ', chunk);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### GET JSON object in a stream (from 0.7+)
|
||||
|
||||
```js
|
||||
var stream = needle.get('http://jsonplaceholder.typicode.com/db', { parse: true });
|
||||
|
||||
stream.on('readable', function() {
|
||||
var node;
|
||||
|
||||
// our stream will only emit a single JSON root node.
|
||||
while (node = this.read()) {
|
||||
console.log('got data: ', node);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### GET JSONStream flexible parser with search query (from 0.7+)
|
||||
|
||||
```js
|
||||
|
||||
// The 'data' element of this stream will be the string representation
|
||||
// of the titles of all posts.
|
||||
|
||||
needle.get('http://jsonplaceholder.typicode.com/db', { parse: true })
|
||||
.pipe(new JSONStream.parse('posts.*.title'))
|
||||
.on('data', function (obj) {
|
||||
console.log('got post title: %s', obj);
|
||||
});
|
||||
```
|
||||
|
||||
### File upload using multipart, passing file path
|
||||
|
||||
```js
|
||||
var data = {
|
||||
foo: 'bar',
|
||||
image: { file: '/home/tomas/linux.png', content_type: 'image/png' }
|
||||
}
|
||||
|
||||
needle.post('http://my.other.app.com', data, { multipart: true }, function(err, resp, body) {
|
||||
// needle will read the file and include it in the form-data as binary
|
||||
});
|
||||
```
|
||||
|
||||
### Stream upload, PUT or POST
|
||||
|
||||
``` js
|
||||
needle.put('https://api.app.com/v2', fs.createReadStream('myfile.txt'), function(err, resp, body) {
|
||||
// stream content is uploaded verbatim
|
||||
});
|
||||
```
|
||||
|
||||
### Multipart POST, passing data buffer
|
||||
|
||||
```js
|
||||
var buffer = fs.readFileSync('/path/to/package.zip');
|
||||
|
||||
var data = {
|
||||
zip_file: {
|
||||
buffer : buffer,
|
||||
filename : 'mypackage.zip',
|
||||
content_type : 'application/octet-stream'
|
||||
}
|
||||
}
|
||||
|
||||
needle.post('http://somewhere.com/over/the/rainbow', data, { multipart: true }, function(err, resp, body) {
|
||||
// note that when using buffers, we need to pass the filename for the multipart body.
|
||||
// you can also pass a filename when using the file path method, in case you want to override
|
||||
// the default filename to be received on the other end.
|
||||
});
|
||||
```
|
||||
|
||||
### Multipart with custom Content-Type
|
||||
|
||||
```js
|
||||
var data = {
|
||||
token: 'verysecret',
|
||||
payload: {
|
||||
value: JSON.stringify({ title: 'test', version: 1 }),
|
||||
content_type: 'application/json'
|
||||
}
|
||||
}
|
||||
|
||||
needle.post('http://test.com/', data, { timeout: 5000, multipart: true }, function(err, resp, body) {
|
||||
// in this case, if the request takes more than 5 seconds
|
||||
// the callback will return a [Socket closed] error
|
||||
});
|
||||
```
|
||||
|
||||
For even more examples, check out the examples directory in the repo.
|
||||
|
||||
### Testing
|
||||
|
||||
To run tests, you need to generate a self-signed SSL certificate in the `test` directory. After cloning the repository, run the following commands:
|
||||
|
||||
$ mkdir -p test/keys
|
||||
$ openssl genrsa -out test/keys/ssl.key 2048
|
||||
$ openssl req -new -key test/keys/ssl.key -x509 -days 999 -out test/keys/ssl.cert
|
||||
|
||||
Then you should be able to run `npm test` once you have the dependencies in place.
|
||||
|
||||
> Note: Tests currently only work on Linux-based environments that have `/proc/self/fd`. They *do not* work on macOS environments.
|
||||
> You can use Docker to run tests by creating a container and mounting the needle project directory on `/app`
|
||||
> `docker create --name Needle -v /app -w /app -v /app/node_modules -i node:argon`
|
||||
|
||||
Credits
|
||||
-------
|
||||
|
||||
Written by Tomás Pollak, with the help of contributors.
|
||||
|
||||
Copyright
|
||||
---------
|
||||
|
||||
(c) Fork Ltd. Licensed under the MIT license.
|
40
node_modules/needle/bin/needle
generated
vendored
40
node_modules/needle/bin/needle
generated
vendored
|
@ -1,40 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
var needle = require('./../lib/needle');
|
||||
|
||||
function exit(code, str) {
|
||||
console.log(str) || process.exit(code);
|
||||
}
|
||||
|
||||
function usage() {
|
||||
var out = ['Usage: needle [get|head|post|put|delete] url [query]'];
|
||||
out.push('Examples: \n needle get google.com\n needle post server.com/api foo=bar');
|
||||
exit(1, out.join('\n'))
|
||||
}
|
||||
|
||||
if (process.argv[2] == '-v' || process.argv[2] == '--version')
|
||||
exit(0, needle.version);
|
||||
else if (process.argv[2] == null)
|
||||
usage();
|
||||
|
||||
var method = process.argv[2],
|
||||
url = process.argv[3],
|
||||
options = { compressed: true, parse_response: true, follow_max: 5, timeout: 10000 };
|
||||
|
||||
if (!needle[method]) {
|
||||
url = method;
|
||||
method = 'get';
|
||||
}
|
||||
|
||||
var callback = function(err, resp) {
|
||||
if (err) return exit(1, "Error: " + err.message);
|
||||
|
||||
if (process.argv.indexOf('-i') != -1)
|
||||
console.log(resp.headers) || console.log('');
|
||||
|
||||
console.log(resp.body.toString());
|
||||
};
|
||||
|
||||
if (method == 'post' || method == 'put')
|
||||
needle[method](url, process.argv[4], options, callback);
|
||||
else
|
||||
needle[method](url, options, callback);
|
22
node_modules/needle/examples/deflated-stream.js
generated
vendored
22
node_modules/needle/examples/deflated-stream.js
generated
vendored
|
@ -1,22 +0,0 @@
|
|||
var fs = require('fs'),
|
||||
stream = require('stream'),
|
||||
needle = require('./../');
|
||||
|
||||
var url = 'http://ibl.gamechaser.net/f/tagqfxtteucbuldhezkz/bt_level1.gz';
|
||||
|
||||
var resp = needle.get(url, { compressed: true, follow_max: 10 });
|
||||
console.log('Downloading...');
|
||||
|
||||
resp.on('readable', function() {
|
||||
|
||||
while (data = this.read()) {
|
||||
var lines = data.toString().split('\n');
|
||||
console.log('Got ' + lines.length + ' items.');
|
||||
// console.log(lines);
|
||||
}
|
||||
|
||||
})
|
||||
|
||||
resp.on('done', function(data) {
|
||||
console.log('Done');
|
||||
})
|
16
node_modules/needle/examples/digest-auth.js
generated
vendored
16
node_modules/needle/examples/digest-auth.js
generated
vendored
|
@ -1,16 +0,0 @@
|
|||
var needle = require('./..');
|
||||
|
||||
var opts = {
|
||||
username: 'user3',
|
||||
password: 'user3',
|
||||
auth: 'digest'
|
||||
}
|
||||
|
||||
needle.get('http://test.webdav.org/auth-digest/', opts, function(err, resp, body) {
|
||||
console.log(resp.headers);
|
||||
|
||||
if (resp.statusCode == 401)
|
||||
console.log('\nIt failed.')
|
||||
else
|
||||
console.log('\nIt worked!')
|
||||
});
|
18
node_modules/needle/examples/download-to-file.js
generated
vendored
18
node_modules/needle/examples/download-to-file.js
generated
vendored
|
@ -1,18 +0,0 @@
|
|||
var fs = require('fs'),
|
||||
needle = require('./..'),
|
||||
path = require('path');
|
||||
|
||||
var url = process.argv[2] || 'https://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png';
|
||||
var file = path.basename(url);
|
||||
|
||||
console.log('Downloading ' + file);
|
||||
|
||||
needle.get(url, { output: file, follow: 3 }, function(err, resp, data){
|
||||
console.log('File saved: ' + process.cwd() + '/' + file);
|
||||
|
||||
var size = fs.statSync(file).size;
|
||||
if (size == resp.bytes)
|
||||
console.log(resp.bytes + ' bytes written to file.');
|
||||
else
|
||||
throw new Error('File size mismatch: ' + size + ' != ' + resp.bytes);
|
||||
});
|
25
node_modules/needle/examples/multipart-stream.js
generated
vendored
25
node_modules/needle/examples/multipart-stream.js
generated
vendored
|
@ -1,25 +0,0 @@
|
|||
var needle = require('./../');
|
||||
|
||||
var url = 'http://posttestserver.com/post.php?dir=needle';
|
||||
|
||||
var black_pixel = Buffer.from("R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=", 'base64');
|
||||
|
||||
var data = {
|
||||
foo: 'bar',
|
||||
nested: {
|
||||
test: 123
|
||||
},
|
||||
image: { buffer: black_pixel, content_type: 'image/gif' }
|
||||
}
|
||||
|
||||
var resp = needle.post(url, data, { multipart: true });
|
||||
|
||||
resp.on('readable', function() {
|
||||
while (data = this.read()) {
|
||||
console.log(data.toString());
|
||||
}
|
||||
})
|
||||
|
||||
resp.on('done', function(data) {
|
||||
console.log('Done.');
|
||||
})
|
23
node_modules/needle/examples/parsed-stream.js
generated
vendored
23
node_modules/needle/examples/parsed-stream.js
generated
vendored
|
@ -1,23 +0,0 @@
|
|||
//////////////////////////////////////////
|
||||
// This example demonstrates what happens
|
||||
// when you use the built-in JSON parser.
|
||||
//////////////////////////////////////////
|
||||
|
||||
var fs = require('fs'),
|
||||
stream = require('stream'),
|
||||
needle = require('./../');
|
||||
|
||||
var url = 'http://ip.jsontest.com/',
|
||||
resp = needle.get(url, { parse: true });
|
||||
|
||||
resp.on('readable', function(obj) {
|
||||
var chunk;
|
||||
|
||||
while (chunk = this.read()) {
|
||||
console.log('root = ', chunk);
|
||||
}
|
||||
});
|
||||
|
||||
resp.on('done', function() {
|
||||
console.log('Done.');
|
||||
});
|
21
node_modules/needle/examples/parsed-stream2.js
generated
vendored
21
node_modules/needle/examples/parsed-stream2.js
generated
vendored
|
@ -1,21 +0,0 @@
|
|||
//////////////////////////////////////////
|
||||
// This example illustrates a more complex
|
||||
// example of parsing a JSON stream.
|
||||
//////////////////////////////////////////
|
||||
|
||||
var needle = require('./../'),
|
||||
JSONStream = require('JSONStream');
|
||||
|
||||
var url = 'http://jsonplaceholder.typicode.com/db';
|
||||
|
||||
// Initialize our GET request with our default (JSON)
|
||||
// parsers disabled.
|
||||
|
||||
var json = new needle.get(url, {parse: false})
|
||||
// And now interpret the stream as JSON, returning only the
|
||||
// title of all the posts.
|
||||
.pipe(new JSONStream.parse('posts.*.title'));
|
||||
|
||||
json.on('data', function (obj) {
|
||||
console.log('got title: \'' + obj + '\'');
|
||||
})
|
23
node_modules/needle/examples/stream-events.js
generated
vendored
23
node_modules/needle/examples/stream-events.js
generated
vendored
|
@ -1,23 +0,0 @@
|
|||
var needle = require('./..');
|
||||
|
||||
var resp = needle.get('google.com', { follow_max: 10, timeout: 5000 });
|
||||
|
||||
resp.on('readable', function() {
|
||||
var chunk;
|
||||
while (chunk = this.read()) {
|
||||
console.log('Got ' + chunk.length + ' bytes');
|
||||
}
|
||||
})
|
||||
|
||||
resp.on('headers', function(headers) {
|
||||
console.log('Got headers', headers);
|
||||
})
|
||||
|
||||
resp.on('redirect', function(url) {
|
||||
console.log('Redirected to url ' + url);
|
||||
})
|
||||
|
||||
resp.on('done', function(err) {
|
||||
console.log('Finished. No more data to receive.');
|
||||
if (err) console.log('With error', err)
|
||||
})
|
14
node_modules/needle/examples/stream-to-file.js
generated
vendored
14
node_modules/needle/examples/stream-to-file.js
generated
vendored
|
@ -1,14 +0,0 @@
|
|||
var fs = require('fs'),
|
||||
needle = require('./..'),
|
||||
path = require('path');
|
||||
|
||||
var url = process.argv[2] || 'http://www.google.com/images/errors/robot.png';
|
||||
var file = path.basename(url);
|
||||
|
||||
console.log('Downloading ' + file + '...');
|
||||
needle
|
||||
.get(url)
|
||||
.pipe(fs.createWriteStream(file))
|
||||
.on('done', function() {
|
||||
console.log('Done!')
|
||||
})
|
51
node_modules/needle/examples/upload-image.js
generated
vendored
51
node_modules/needle/examples/upload-image.js
generated
vendored
|
@ -1,51 +0,0 @@
|
|||
var needle = require('../'),
|
||||
path = require('path');
|
||||
|
||||
var image = 'https://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png';
|
||||
|
||||
function upload(obj, cb) {
|
||||
console.log('Uploading image...');
|
||||
|
||||
var url = 'http://deviantsart.com';
|
||||
|
||||
var opts = {
|
||||
timeout: 10000,
|
||||
follow: 3,
|
||||
multipart: true
|
||||
};
|
||||
|
||||
var params = {
|
||||
file: obj
|
||||
}
|
||||
|
||||
needle.post(url, params, opts, function(err, resp) {
|
||||
if (err || !resp.body.match('url'))
|
||||
return cb(err || new Error('No image URL found.'))
|
||||
|
||||
cb(null, JSON.parse(resp.body).url)
|
||||
})
|
||||
}
|
||||
|
||||
function download(url, cb) {
|
||||
console.log('Getting ' + url);
|
||||
needle.get(url, function(err, resp) {
|
||||
if (err) throw err;
|
||||
|
||||
cb(null, resp.body);
|
||||
})
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
// ok, now go.
|
||||
|
||||
download(image, function(err, buffer) {
|
||||
if (err) throw err;
|
||||
|
||||
var obj = { buffer: buffer, content_type: 'image/png' };
|
||||
|
||||
upload(obj, function(err, url) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Image uploaded to ' + url);
|
||||
})
|
||||
})
|
112
node_modules/needle/lib/auth.js
generated
vendored
112
node_modules/needle/lib/auth.js
generated
vendored
|
@ -1,112 +0,0 @@
|
|||
var createHash = require('crypto').createHash;
|
||||
|
||||
function get_header(header, credentials, opts) {
|
||||
var type = header.split(' ')[0],
|
||||
user = credentials[0],
|
||||
pass = credentials[1];
|
||||
|
||||
if (type == 'Digest') {
|
||||
return digest.generate(header, user, pass, opts.method, opts.path);
|
||||
} else if (type == 'Basic') {
|
||||
return basic(user, pass);
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////
|
||||
// basic
|
||||
|
||||
function md5(string) {
|
||||
return createHash('md5').update(string).digest('hex');
|
||||
}
|
||||
|
||||
function basic(user, pass) {
|
||||
var str = typeof pass == 'undefined' ? user : [user, pass].join(':');
|
||||
return 'Basic ' + Buffer.from(str).toString('base64');
|
||||
}
|
||||
|
||||
////////////////////
|
||||
// digest
|
||||
// logic inspired from https://github.com/simme/node-http-digest-client
|
||||
|
||||
var digest = {};
|
||||
|
||||
digest.parse_header = function(header) {
|
||||
var challenge = {},
|
||||
matches = header.match(/([a-z0-9_-]+)="?([a-z0-9_=\/\.@\s-\+)()]+)"?/gi);
|
||||
|
||||
for (var i = 0, l = matches.length; i < l; i++) {
|
||||
var parts = matches[i].split('='),
|
||||
key = parts.shift(),
|
||||
val = parts.join('=').replace(/^"/, '').replace(/"$/, '');
|
||||
|
||||
challenge[key] = val;
|
||||
}
|
||||
|
||||
return challenge;
|
||||
}
|
||||
|
||||
digest.update_nc = function(nc) {
|
||||
var max = 99999999;
|
||||
nc++;
|
||||
|
||||
if (nc > max)
|
||||
nc = 1;
|
||||
|
||||
var padding = new Array(8).join('0') + '';
|
||||
nc = nc + '';
|
||||
return padding.substr(0, 8 - nc.length) + nc;
|
||||
}
|
||||
|
||||
digest.generate = function(header, user, pass, method, path) {
|
||||
|
||||
var nc = 1,
|
||||
cnonce = null,
|
||||
challenge = digest.parse_header(header);
|
||||
|
||||
var ha1 = md5(user + ':' + challenge.realm + ':' + pass),
|
||||
ha2 = md5(method.toUpperCase() + ':' + path),
|
||||
resp = [ha1, challenge.nonce];
|
||||
|
||||
if (typeof challenge.qop === 'string') {
|
||||
cnonce = md5(Math.random().toString(36)).substr(0, 8);
|
||||
nc = digest.update_nc(nc);
|
||||
resp = resp.concat(nc, cnonce);
|
||||
resp = resp.concat(challenge.qop, ha2);
|
||||
} else {
|
||||
resp = resp.concat(ha2);
|
||||
}
|
||||
|
||||
|
||||
var params = {
|
||||
uri : path,
|
||||
realm : challenge.realm,
|
||||
nonce : challenge.nonce,
|
||||
username : user,
|
||||
response : md5(resp.join(':'))
|
||||
}
|
||||
|
||||
if (challenge.qop) {
|
||||
params.qop = challenge.qop;
|
||||
}
|
||||
|
||||
if (challenge.opaque) {
|
||||
params.opaque = challenge.opaque;
|
||||
}
|
||||
|
||||
if (cnonce) {
|
||||
params.nc = nc;
|
||||
params.cnonce = cnonce;
|
||||
}
|
||||
|
||||
header = []
|
||||
for (var k in params)
|
||||
header.push(k + '="' + params[k] + '"')
|
||||
|
||||
return 'Digest ' + header.join(', ');
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
header : get_header,
|
||||
basic : basic,
|
||||
digest : digest.generate
|
||||
}
|
79
node_modules/needle/lib/cookies.js
generated
vendored
79
node_modules/needle/lib/cookies.js
generated
vendored
|
@ -1,79 +0,0 @@
|
|||
|
||||
// Simple cookie handling implementation based on the standard RFC 6265.
|
||||
//
|
||||
// This module just has two functionalities:
|
||||
// - Parse a set-cookie-header as a key value object
|
||||
// - Write a cookie-string from a key value object
|
||||
//
|
||||
// All cookie attributes are ignored.
|
||||
|
||||
var unescape = require('querystring').unescape;
|
||||
|
||||
var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/;
|
||||
var EXCLUDED_CHARS = /[\x00-\x1F\x7F\x3B\x3B\s\"\,\\"%]/g;
|
||||
var TRAILING_SEMICOLON = /\x3B+$/;
|
||||
var SEP_SEMICOLON = /\s*\x3B\s*/;
|
||||
|
||||
// i know these should be 'const', but I'd like to keep
|
||||
// supporting earlier node.js versions as long as I can. :)
|
||||
|
||||
var KEY_INDEX = 1; // index of key from COOKIE_PAIR match
|
||||
var VALUE_INDEX = 3; // index of value from COOKIE_PAIR match
|
||||
|
||||
// Returns a copy of str, trimmed and without a trailing semicolon.
|
||||
function cleanCookieString(str) {
|
||||
return str.trim().replace(/\x3B+$/, '');
|
||||
}
|
||||
|
||||
function getFirstPair(str) {
|
||||
var index = str.indexOf('\x3B');
|
||||
return index === -1 ? str : str.substr(0, index);
|
||||
}
|
||||
|
||||
// Returns an encoded copy of str based on RFC6265 S4.1.1.
|
||||
function encodeCookieComponent(str) {
|
||||
return str.toString().replace(EXCLUDED_CHARS, encodeURIComponent);
|
||||
}
|
||||
|
||||
// Parses a set-cookie-string based on the standard defined in RFC6265 S4.1.1.
|
||||
function parseSetCookieString(str) {
|
||||
str = cleanCookieString(str);
|
||||
str = getFirstPair(str);
|
||||
|
||||
var res = COOKIE_PAIR.exec(str);
|
||||
if (!res || !res[VALUE_INDEX]) return null;
|
||||
|
||||
return {
|
||||
name : unescape(res[KEY_INDEX]),
|
||||
value : unescape(res[VALUE_INDEX])
|
||||
};
|
||||
}
|
||||
|
||||
// Parses a set-cookie-header and returns a key/value object.
|
||||
// Each key represents the name of a cookie.
|
||||
function parseSetCookieHeader(header) {
|
||||
if (!header) return {};
|
||||
header = Array.isArray(header) ? header : [header];
|
||||
|
||||
return header.reduce(function(res, str) {
|
||||
var cookie = parseSetCookieString(str);
|
||||
if (cookie) res[cookie.name] = cookie.value;
|
||||
return res;
|
||||
}, {});
|
||||
}
|
||||
|
||||
// Writes a set-cookie-string based on the standard defined in RFC6265 S4.1.1.
|
||||
function writeCookieString(obj) {
|
||||
return Object.keys(obj).reduce(function(str, name) {
|
||||
var encodedName = encodeCookieComponent(name);
|
||||
var encodedValue = encodeCookieComponent(obj[name]);
|
||||
str += (str ? '; ' : '') + encodedName + '=' + encodedValue;
|
||||
return str;
|
||||
}, '');
|
||||
}
|
||||
|
||||
// returns a key/val object from an array of cookie strings
|
||||
exports.read = parseSetCookieHeader;
|
||||
|
||||
// writes a cookie string header
|
||||
exports.write = writeCookieString;
|
53
node_modules/needle/lib/decoder.js
generated
vendored
53
node_modules/needle/lib/decoder.js
generated
vendored
|
@ -1,53 +0,0 @@
|
|||
var iconv,
|
||||
inherits = require('util').inherits,
|
||||
stream = require('stream');
|
||||
|
||||
var regex = /(?:charset|encoding)\s*=\s*['"]? *([\w\-]+)/i;
|
||||
|
||||
inherits(StreamDecoder, stream.Transform);
|
||||
|
||||
function StreamDecoder(charset) {
|
||||
if (!(this instanceof StreamDecoder))
|
||||
return new StreamDecoder(charset);
|
||||
|
||||
stream.Transform.call(this, charset);
|
||||
this.charset = charset;
|
||||
this.parsed_chunk = false;
|
||||
}
|
||||
|
||||
StreamDecoder.prototype._transform = function(chunk, encoding, done) {
|
||||
var res, found;
|
||||
|
||||
// try to get the charset from the chunk, just once
|
||||
if (this.charset == 'utf8' && !this.parsed_chunk) {
|
||||
this.parsed_chunk = true;
|
||||
|
||||
var matches = regex.exec(chunk.toString());
|
||||
if (matches) {
|
||||
found = matches[1].toLowerCase();
|
||||
this.charset = found == 'utf-8' ? 'utf8' : found;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
res = iconv.decode(chunk, this.charset);
|
||||
} catch(e) { // something went wrong, just return original chunk
|
||||
res = chunk;
|
||||
}
|
||||
|
||||
this.push(res);
|
||||
done();
|
||||
}
|
||||
|
||||
module.exports = function(charset) {
|
||||
try {
|
||||
if (!iconv) iconv = require('iconv-lite');
|
||||
} catch(e) {
|
||||
/* iconv not found */
|
||||
}
|
||||
|
||||
if (iconv)
|
||||
return new StreamDecoder(charset);
|
||||
else
|
||||
return new stream.PassThrough;
|
||||
}
|
98
node_modules/needle/lib/multipart.js
generated
vendored
98
node_modules/needle/lib/multipart.js
generated
vendored
|
@ -1,98 +0,0 @@
|
|||
var readFile = require('fs').readFile,
|
||||
basename = require('path').basename;
|
||||
|
||||
exports.build = function(data, boundary, callback) {
|
||||
|
||||
if (typeof data != 'object' || typeof data.pipe == 'function')
|
||||
return callback(new Error('Multipart builder expects data as key/val object.'));
|
||||
|
||||
var body = '',
|
||||
object = flatten(data),
|
||||
count = Object.keys(object).length;
|
||||
|
||||
if (count === 0)
|
||||
return callback(new Error('Empty multipart body. Invalid data.'))
|
||||
|
||||
function done(err, section) {
|
||||
if (err) return callback(err);
|
||||
if (section) body += section;
|
||||
--count || callback(null, body + '--' + boundary + '--');
|
||||
};
|
||||
|
||||
for (var key in object) {
|
||||
var value = object[key];
|
||||
if (value === null || typeof value == 'undefined') {
|
||||
done();
|
||||
} else if (Buffer.isBuffer(value)) {
|
||||
var part = { buffer: value, content_type: 'application/octet-stream' };
|
||||
generate_part(key, part, boundary, done);
|
||||
} else {
|
||||
var part = (value.buffer || value.file || value.content_type) ? value : { value: value };
|
||||
generate_part(key, part, boundary, done);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function generate_part(name, part, boundary, callback) {
|
||||
|
||||
var return_part = '--' + boundary + '\r\n';
|
||||
return_part += 'Content-Disposition: form-data; name="' + name + '"';
|
||||
|
||||
function append(data, filename) {
|
||||
|
||||
if (data) {
|
||||
var binary = part.content_type.indexOf('text') == -1;
|
||||
return_part += '; filename="' + encodeURIComponent(filename) + '"\r\n';
|
||||
if (binary) return_part += 'Content-Transfer-Encoding: binary\r\n';
|
||||
return_part += 'Content-Type: ' + part.content_type + '\r\n\r\n';
|
||||
return_part += binary ? data.toString('binary') : data.toString('utf8');
|
||||
}
|
||||
|
||||
callback(null, return_part + '\r\n');
|
||||
};
|
||||
|
||||
if ((part.file || part.buffer) && part.content_type) {
|
||||
|
||||
var filename = part.filename ? part.filename : part.file ? basename(part.file) : name;
|
||||
if (part.buffer) return append(part.buffer, filename);
|
||||
|
||||
readFile(part.file, function(err, data) {
|
||||
if (err) return callback(err);
|
||||
append(data, filename);
|
||||
});
|
||||
|
||||
} else {
|
||||
|
||||
if (typeof part.value == 'object')
|
||||
return callback(new Error('Object received for ' + name + ', expected string.'))
|
||||
|
||||
if (part.content_type) {
|
||||
return_part += '\r\n';
|
||||
return_part += 'Content-Type: ' + part.content_type;
|
||||
}
|
||||
|
||||
return_part += '\r\n\r\n';
|
||||
return_part += Buffer.from(String(part.value), 'utf8').toString('binary');
|
||||
append();
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// flattens nested objects for multipart body
|
||||
function flatten(object, into, prefix) {
|
||||
into = into || {};
|
||||
|
||||
for(var key in object) {
|
||||
var prefix_key = prefix ? prefix + '[' + key + ']' : key;
|
||||
var prop = object[key];
|
||||
|
||||
if (prop && typeof prop === 'object' && !(prop.buffer || prop.file || prop.content_type))
|
||||
flatten(prop, into, prefix_key)
|
||||
else
|
||||
into[prefix_key] = prop;
|
||||
}
|
||||
|
||||
return into;
|
||||
}
|
879
node_modules/needle/lib/needle.js
generated
vendored
879
node_modules/needle/lib/needle.js
generated
vendored
|
@ -1,879 +0,0 @@
|
|||
//////////////////////////////////////////
|
||||
// Needle -- HTTP Client for Node.js
|
||||
// Written by Tomás Pollak <tomas@forkhq.com>
|
||||
// (c) 2012-2020 - Fork Ltd.
|
||||
// MIT Licensed
|
||||
//////////////////////////////////////////
|
||||
|
||||
var fs = require('fs'),
|
||||
http = require('http'),
|
||||
https = require('https'),
|
||||
url = require('url'),
|
||||
stream = require('stream'),
|
||||
debug = require('debug')('needle'),
|
||||
stringify = require('./querystring').build,
|
||||
multipart = require('./multipart'),
|
||||
auth = require('./auth'),
|
||||
cookies = require('./cookies'),
|
||||
parsers = require('./parsers'),
|
||||
decoder = require('./decoder');
|
||||
|
||||
//////////////////////////////////////////
|
||||
// variabilia
|
||||
|
||||
var version = require('../package.json').version;
|
||||
|
||||
var user_agent = 'Needle/' + version;
|
||||
user_agent += ' (Node.js ' + process.version + '; ' + process.platform + ' ' + process.arch + ')';
|
||||
|
||||
var tls_options = 'agent pfx key passphrase cert ca ciphers rejectUnauthorized secureProtocol checkServerIdentity family';
|
||||
|
||||
// older versions of node (< 0.11.4) prevent the runtime from exiting
|
||||
// because of connections in keep-alive state. so if this is the case
|
||||
// we'll default new requests to set a Connection: close header.
|
||||
var close_by_default = !http.Agent || http.Agent.defaultMaxSockets != Infinity;
|
||||
|
||||
// see if we have Object.assign. otherwise fall back to util._extend
|
||||
var extend = Object.assign ? Object.assign : require('util')._extend;
|
||||
|
||||
// these are the status codes that Needle interprets as redirects.
|
||||
var redirect_codes = [301, 302, 303, 307, 308];
|
||||
|
||||
//////////////////////////////////////////
|
||||
// decompressors for gzip/deflate/br bodies
|
||||
|
||||
function bind_opts(fn, options) {
|
||||
return fn.bind(null, options);
|
||||
}
|
||||
|
||||
var decompressors = {};
|
||||
|
||||
try {
|
||||
|
||||
var zlib = require('zlib');
|
||||
|
||||
// Enable Z_SYNC_FLUSH to avoid Z_BUF_ERROR errors (Node PR #2595)
|
||||
var zlib_options = {
|
||||
flush: zlib.Z_SYNC_FLUSH,
|
||||
finishFlush: zlib.Z_SYNC_FLUSH
|
||||
};
|
||||
|
||||
var br_options = {
|
||||
flush: zlib.BROTLI_OPERATION_FLUSH,
|
||||
finishFlush: zlib.BROTLI_OPERATION_FLUSH
|
||||
};
|
||||
|
||||
decompressors['x-deflate'] = bind_opts(zlib.Inflate, zlib_options);
|
||||
decompressors['deflate'] = bind_opts(zlib.Inflate, zlib_options);
|
||||
decompressors['x-gzip'] = bind_opts(zlib.Gunzip, zlib_options);
|
||||
decompressors['gzip'] = bind_opts(zlib.Gunzip, zlib_options);
|
||||
if (typeof zlib.BrotliDecompress === 'function') {
|
||||
decompressors['br'] = bind_opts(zlib.BrotliDecompress, br_options);
|
||||
}
|
||||
|
||||
} catch(e) { /* zlib not available */ }
|
||||
|
||||
//////////////////////////////////////////
|
||||
// options and aliases
|
||||
|
||||
var defaults = {
|
||||
// data
|
||||
boundary : '--------------------NODENEEDLEHTTPCLIENT',
|
||||
encoding : 'utf8',
|
||||
parse_response : 'all', // same as true. valid options: 'json', 'xml' or false/null
|
||||
proxy : null,
|
||||
|
||||
// headers
|
||||
headers : {},
|
||||
accept : '*/*',
|
||||
user_agent : user_agent,
|
||||
|
||||
// numbers
|
||||
open_timeout : 10000,
|
||||
response_timeout : 0,
|
||||
read_timeout : 0,
|
||||
follow_max : 0,
|
||||
stream_length : -1,
|
||||
|
||||
// booleans
|
||||
compressed : false,
|
||||
decode_response : true,
|
||||
parse_cookies : true,
|
||||
follow_set_cookies : false,
|
||||
follow_set_referer : false,
|
||||
follow_keep_method : false,
|
||||
follow_if_same_host : false,
|
||||
follow_if_same_protocol : false,
|
||||
follow_if_same_location : false
|
||||
}
|
||||
|
||||
var aliased = {
|
||||
options: {
|
||||
decode : 'decode_response',
|
||||
parse : 'parse_response',
|
||||
timeout : 'open_timeout',
|
||||
follow : 'follow_max'
|
||||
},
|
||||
inverted: {}
|
||||
}
|
||||
|
||||
// only once, invert aliased keys so we can get passed options.
|
||||
Object.keys(aliased.options).map(function(k) {
|
||||
var value = aliased.options[k];
|
||||
aliased.inverted[value] = k;
|
||||
});
|
||||
|
||||
//////////////////////////////////////////
|
||||
// helpers
|
||||
|
||||
function keys_by_type(type) {
|
||||
return Object.keys(defaults).map(function(el) {
|
||||
if (defaults[el] !== null && defaults[el].constructor == type)
|
||||
return el;
|
||||
}).filter(function(el) { return el })
|
||||
}
|
||||
|
||||
function parse_content_type(header) {
|
||||
if (!header || header === '') return {};
|
||||
|
||||
var found, charset = 'utf8', arr = header.split(';');
|
||||
|
||||
if (arr.length > 1 && (found = arr[1].match(/charset=(.+)/)))
|
||||
charset = found[1];
|
||||
|
||||
return { type: arr[0], charset: charset };
|
||||
}
|
||||
|
||||
function is_stream(obj) {
|
||||
return typeof obj.pipe === 'function';
|
||||
}
|
||||
|
||||
function get_stream_length(stream, given_length, cb) {
|
||||
if (given_length > 0)
|
||||
return cb(given_length);
|
||||
|
||||
if (stream.end !== void 0 && stream.end !== Infinity && stream.start !== void 0)
|
||||
return cb((stream.end + 1) - (stream.start || 0));
|
||||
|
||||
fs.stat(stream.path, function(err, stat) {
|
||||
cb(stat ? stat.size - (stream.start || 0) : null);
|
||||
});
|
||||
}
|
||||
|
||||
function resolve_url(href, base) {
|
||||
if (url.URL)
|
||||
return new url.URL(href, base);
|
||||
|
||||
// older Node version (< v6.13)
|
||||
return url.resolve(base, href);
|
||||
}
|
||||
|
||||
function pump_streams(streams, cb) {
|
||||
if (stream.pipeline)
|
||||
return stream.pipeline.apply(null, streams.concat(cb));
|
||||
|
||||
var tmp = streams.shift();
|
||||
while (streams.length) {
|
||||
tmp = tmp.pipe(streams.shift());
|
||||
tmp.once('error', function(e) {
|
||||
cb && cb(e);
|
||||
cb = null;
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
//////////////////////////////////////////
|
||||
// the main act
|
||||
|
||||
function Needle(method, uri, data, options, callback) {
|
||||
// if (!(this instanceof Needle)) {
|
||||
// return new Needle(method, uri, data, options, callback);
|
||||
// }
|
||||
|
||||
if (typeof uri !== 'string')
|
||||
throw new TypeError('URL must be a string, not ' + uri);
|
||||
|
||||
this.method = method.toLowerCase();
|
||||
this.uri = uri;
|
||||
this.data = data;
|
||||
|
||||
if (typeof options == 'function') {
|
||||
this.callback = options;
|
||||
this.options = {};
|
||||
} else {
|
||||
this.callback = callback;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Needle.prototype.setup = function(uri, options) {
|
||||
|
||||
function get_option(key, fallback) {
|
||||
// if original is in options, return that value
|
||||
if (typeof options[key] != 'undefined') return options[key];
|
||||
|
||||
// otherwise, return value from alias or fallback/undefined
|
||||
return typeof options[aliased.inverted[key]] != 'undefined'
|
||||
? options[aliased.inverted[key]] : fallback;
|
||||
}
|
||||
|
||||
function check_value(expected, key) {
|
||||
var value = get_option(key),
|
||||
type = typeof value;
|
||||
|
||||
if (type != 'undefined' && type != expected)
|
||||
throw new TypeError(type + ' received for ' + key + ', but expected a ' + expected);
|
||||
|
||||
return (type == expected) ? value : defaults[key];
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////
|
||||
// the basics
|
||||
|
||||
var config = {
|
||||
http_opts : {
|
||||
localAddress: get_option('localAddress', undefined),
|
||||
lookup: get_option('lookup', undefined)
|
||||
}, // passed later to http.request() directly
|
||||
headers : {},
|
||||
output : options.output,
|
||||
proxy : get_option('proxy', defaults.proxy),
|
||||
parser : get_option('parse_response', defaults.parse_response),
|
||||
encoding : options.encoding || (options.multipart ? 'binary' : defaults.encoding)
|
||||
}
|
||||
|
||||
keys_by_type(Boolean).forEach(function(key) {
|
||||
config[key] = check_value('boolean', key);
|
||||
})
|
||||
|
||||
keys_by_type(Number).forEach(function(key) {
|
||||
config[key] = check_value('number', key);
|
||||
})
|
||||
|
||||
// populate http_opts with given TLS options
|
||||
tls_options.split(' ').forEach(function(key) {
|
||||
if (typeof options[key] != 'undefined') {
|
||||
config.http_opts[key] = options[key];
|
||||
if (typeof options.agent == 'undefined')
|
||||
config.http_opts.agent = false; // otherwise tls options are skipped
|
||||
}
|
||||
});
|
||||
|
||||
//////////////////////////////////////////////////
|
||||
// headers, cookies
|
||||
|
||||
for (var key in defaults.headers)
|
||||
config.headers[key] = defaults.headers[key];
|
||||
|
||||
config.headers['accept'] = options.accept || defaults.accept;
|
||||
config.headers['user-agent'] = options.user_agent || defaults.user_agent;
|
||||
|
||||
if (options.content_type)
|
||||
config.headers['content-type'] = options.content_type;
|
||||
|
||||
// set connection header if opts.connection was passed, or if node < 0.11.4 (close)
|
||||
if (options.connection || close_by_default)
|
||||
config.headers['connection'] = options.connection || 'close';
|
||||
|
||||
if ((options.compressed || defaults.compressed) && typeof zlib != 'undefined')
|
||||
config.headers['accept-encoding'] = decompressors['br'] ? 'gzip, deflate, br' : 'gzip, deflate';
|
||||
|
||||
if (options.cookies)
|
||||
config.headers['cookie'] = cookies.write(options.cookies);
|
||||
|
||||
//////////////////////////////////////////////////
|
||||
// basic/digest auth
|
||||
|
||||
if (uri.match(/[^\/]@/)) { // url contains user:pass@host, so parse it.
|
||||
var parts = (url.parse(uri).auth || '').split(':');
|
||||
options.username = parts[0];
|
||||
options.password = parts[1];
|
||||
}
|
||||
|
||||
if (options.username) {
|
||||
if (options.auth && (options.auth == 'auto' || options.auth == 'digest')) {
|
||||
config.credentials = [options.username, options.password];
|
||||
} else {
|
||||
config.headers['authorization'] = auth.basic(options.username, options.password);
|
||||
}
|
||||
}
|
||||
|
||||
// if proxy is present, set auth header from either url or proxy_user option.
|
||||
if (config.proxy) {
|
||||
if (config.proxy.indexOf('http') === -1)
|
||||
config.proxy = 'http://' + config.proxy;
|
||||
|
||||
if (config.proxy.indexOf('@') !== -1) {
|
||||
var proxy = (url.parse(config.proxy).auth || '').split(':');
|
||||
options.proxy_user = proxy[0];
|
||||
options.proxy_pass = proxy[1];
|
||||
}
|
||||
|
||||
if (options.proxy_user)
|
||||
config.headers['proxy-authorization'] = auth.basic(options.proxy_user, options.proxy_pass);
|
||||
}
|
||||
|
||||
// now that all our headers are set, overwrite them if instructed.
|
||||
for (var h in options.headers)
|
||||
config.headers[h.toLowerCase()] = options.headers[h];
|
||||
|
||||
config.uri_modifier = get_option('uri_modifier', null);
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
Needle.prototype.start = function() {
|
||||
|
||||
var out = new stream.PassThrough({ objectMode: false }),
|
||||
uri = this.uri,
|
||||
data = this.data,
|
||||
method = this.method,
|
||||
callback = (typeof this.options == 'function') ? this.options : this.callback,
|
||||
options = this.options || {};
|
||||
|
||||
// if no 'http' is found on URL, prepend it.
|
||||
if (uri.indexOf('http') === -1)
|
||||
uri = uri.replace(/^(\/\/)?/, 'http://');
|
||||
|
||||
var self = this, body, waiting = false, config = this.setup(uri, options);
|
||||
|
||||
// unless options.json was set to false, assume boss also wants JSON if content-type matches.
|
||||
var json = options.json || (options.json !== false && config.headers['content-type'] == 'application/json');
|
||||
|
||||
if (data) {
|
||||
|
||||
if (options.multipart) { // boss says we do multipart. so we do it.
|
||||
var boundary = options.boundary || defaults.boundary;
|
||||
|
||||
waiting = true;
|
||||
multipart.build(data, boundary, function(err, parts) {
|
||||
if (err) throw(err);
|
||||
|
||||
config.headers['content-type'] = 'multipart/form-data; boundary=' + boundary;
|
||||
next(parts);
|
||||
});
|
||||
|
||||
} else if (is_stream(data)) {
|
||||
|
||||
if (method == 'get')
|
||||
throw new Error('Refusing to pipe() a stream via GET. Did you mean .post?');
|
||||
|
||||
if (config.stream_length > 0 || (config.stream_length === 0 && data.path)) {
|
||||
// ok, let's get the stream's length and set it as the content-length header.
|
||||
// this prevents some servers from cutting us off before all the data is sent.
|
||||
waiting = true;
|
||||
get_stream_length(data, config.stream_length, function(length) {
|
||||
data.length = length;
|
||||
next(data);
|
||||
})
|
||||
|
||||
} else {
|
||||
// if the boss doesn't want us to get the stream's length, or if it doesn't
|
||||
// have a file descriptor for that purpose, then just head on.
|
||||
body = data;
|
||||
}
|
||||
|
||||
} else if (Buffer.isBuffer(data)) {
|
||||
|
||||
body = data; // use the raw buffer as request body.
|
||||
|
||||
} else if (method == 'get' && !json) {
|
||||
|
||||
// append the data to the URI as a querystring.
|
||||
uri = uri.replace(/\?.*|$/, '?' + stringify(data));
|
||||
|
||||
} else { // string or object data, no multipart.
|
||||
|
||||
// if string, leave it as it is, otherwise, stringify.
|
||||
body = (typeof(data) === 'string') ? data
|
||||
: json ? JSON.stringify(data) : stringify(data);
|
||||
|
||||
// ensure we have a buffer so bytecount is correct.
|
||||
body = Buffer.from(body, config.encoding);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function next(body) {
|
||||
if (body) {
|
||||
if (body.length) config.headers['content-length'] = body.length;
|
||||
|
||||
// if no content-type was passed, determine if json or not.
|
||||
if (!config.headers['content-type']) {
|
||||
config.headers['content-type'] = json
|
||||
? 'application/json; charset=utf-8'
|
||||
: 'application/x-www-form-urlencoded'; // no charset says W3 spec.
|
||||
}
|
||||
}
|
||||
|
||||
// unless a specific accept header was set, assume json: true wants JSON back.
|
||||
if (options.json && (!options.accept && !(options.headers || {}).accept))
|
||||
config.headers['accept'] = 'application/json';
|
||||
|
||||
self.send_request(1, method, uri, config, body, out, callback);
|
||||
}
|
||||
|
||||
if (!waiting) next(body);
|
||||
return out;
|
||||
}
|
||||
|
||||
Needle.prototype.get_request_opts = function(method, uri, config) {
|
||||
var opts = config.http_opts,
|
||||
proxy = config.proxy,
|
||||
remote = proxy ? url.parse(proxy) : url.parse(uri);
|
||||
|
||||
opts.protocol = remote.protocol;
|
||||
opts.host = remote.hostname;
|
||||
opts.port = remote.port || (remote.protocol == 'https:' ? 443 : 80);
|
||||
opts.path = proxy ? uri : remote.pathname + (remote.search || '');
|
||||
opts.method = method;
|
||||
opts.headers = config.headers;
|
||||
|
||||
if (!opts.headers['host']) {
|
||||
// if using proxy, make sure the host header shows the final destination
|
||||
var target = proxy ? url.parse(uri) : remote;
|
||||
opts.headers['host'] = target.hostname;
|
||||
|
||||
// and if a non-standard port was passed, append it to the host header
|
||||
if (target.port && [80, 443].indexOf(target.port) === -1) {
|
||||
opts.headers['host'] += ':' + target.port;
|
||||
}
|
||||
}
|
||||
|
||||
return opts;
|
||||
}
|
||||
|
||||
Needle.prototype.should_follow = function(location, config, original) {
|
||||
if (!location) return false;
|
||||
|
||||
// returns true if location contains matching property (host or protocol)
|
||||
function matches(property) {
|
||||
var property = original[property];
|
||||
return location.indexOf(property) !== -1;
|
||||
}
|
||||
|
||||
// first, check whether the requested location is actually different from the original
|
||||
if (!config.follow_if_same_location && location === original)
|
||||
return false;
|
||||
|
||||
if (config.follow_if_same_host && !matches('host'))
|
||||
return false; // host does not match, so not following
|
||||
|
||||
if (config.follow_if_same_protocol && !matches('protocol'))
|
||||
return false; // protocol does not match, so not following
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
Needle.prototype.send_request = function(count, method, uri, config, post_data, out, callback) {
|
||||
|
||||
if (typeof config.uri_modifier === 'function') {
|
||||
var modified_uri = config.uri_modifier(uri);
|
||||
debug('Modifying request URI', uri + ' => ' + modified_uri);
|
||||
uri = modified_uri;
|
||||
}
|
||||
|
||||
var request,
|
||||
timer,
|
||||
returned = 0,
|
||||
self = this,
|
||||
request_opts = this.get_request_opts(method, uri, config),
|
||||
protocol = request_opts.protocol == 'https:' ? https : http;
|
||||
|
||||
function done(err, resp) {
|
||||
if (returned++ > 0)
|
||||
return debug('Already finished, stopping here.');
|
||||
|
||||
if (timer) clearTimeout(timer);
|
||||
request.removeListener('error', had_error);
|
||||
out.done = true;
|
||||
|
||||
if (callback)
|
||||
return callback(err, resp, resp ? resp.body : undefined);
|
||||
|
||||
// NOTE: this event used to be called 'end', but the behaviour was confusing
|
||||
// when errors occurred, because the stream would still emit an 'end' event.
|
||||
out.emit('done', err);
|
||||
|
||||
// trigger the 'done' event on streams we're being piped to, if any
|
||||
var pipes = out._readableState.pipes || [];
|
||||
if (!pipes.forEach) pipes = [pipes];
|
||||
pipes.forEach(function(st) { st.emit('done', err); })
|
||||
}
|
||||
|
||||
function had_error(err) {
|
||||
debug('Request error', err);
|
||||
out.emit('err', err);
|
||||
done(err || new Error('Unknown error when making request.'));
|
||||
}
|
||||
|
||||
function set_timeout(type, millisecs) {
|
||||
if (timer) clearTimeout(timer);
|
||||
if (millisecs <= 0) return;
|
||||
|
||||
timer = setTimeout(function() {
|
||||
out.emit('timeout', type);
|
||||
request.abort();
|
||||
// also invoke done() to terminate job on read_timeout
|
||||
if (type == 'read') done(new Error(type + ' timeout'));
|
||||
}, millisecs);
|
||||
}
|
||||
|
||||
// handle errors on the underlying socket, that may be closed while writing
|
||||
// for an example case, see test/long_string_spec.js. we make sure this
|
||||
// scenario occurred by verifying the socket's writable & destroyed states.
|
||||
function on_socket_end() {
|
||||
if (returned && !this.writable && this.destroyed === false) {
|
||||
this.destroy();
|
||||
had_error(new Error('Remote end closed socket abruptly.'))
|
||||
}
|
||||
}
|
||||
|
||||
debug('Making request #' + count, request_opts);
|
||||
request = protocol.request(request_opts, function(resp) {
|
||||
|
||||
var headers = resp.headers;
|
||||
debug('Got response', resp.statusCode, headers);
|
||||
out.emit('response', resp);
|
||||
|
||||
set_timeout('read', config.read_timeout);
|
||||
|
||||
// if we got cookies, parse them unless we were instructed not to. make sure to include any
|
||||
// cookies that might have been set on previous redirects.
|
||||
if (config.parse_cookies && (headers['set-cookie'] || config.previous_resp_cookies)) {
|
||||
resp.cookies = extend(config.previous_resp_cookies || {}, cookies.read(headers['set-cookie']));
|
||||
debug('Got cookies', resp.cookies);
|
||||
}
|
||||
|
||||
// if redirect code is found, determine if we should follow it according to the given options.
|
||||
if (redirect_codes.indexOf(resp.statusCode) !== -1 && self.should_follow(headers.location, config, uri)) {
|
||||
// clear timer before following redirects to prevent unexpected setTimeout consequence
|
||||
clearTimeout(timer);
|
||||
|
||||
if (count <= config.follow_max) {
|
||||
out.emit('redirect', headers.location);
|
||||
|
||||
// unless 'follow_keep_method' is true, rewrite the request to GET before continuing.
|
||||
if (!config.follow_keep_method) {
|
||||
method = 'GET';
|
||||
post_data = null;
|
||||
delete config.headers['content-length']; // in case the original was a multipart POST request.
|
||||
}
|
||||
|
||||
// if follow_set_cookies is true, insert cookies in the next request's headers.
|
||||
// we set both the original request cookies plus any response cookies we might have received.
|
||||
if (config.follow_set_cookies) {
|
||||
var request_cookies = cookies.read(config.headers['cookie']);
|
||||
config.previous_resp_cookies = resp.cookies;
|
||||
if (Object.keys(request_cookies).length || Object.keys(resp.cookies || {}).length) {
|
||||
config.headers['cookie'] = cookies.write(extend(request_cookies, resp.cookies));
|
||||
}
|
||||
} else if (config.headers['cookie']) {
|
||||
debug('Clearing original request cookie', config.headers['cookie']);
|
||||
delete config.headers['cookie'];
|
||||
}
|
||||
|
||||
if (config.follow_set_referer)
|
||||
config.headers['referer'] = encodeURI(uri); // the original, not the destination URL.
|
||||
|
||||
config.headers['host'] = null; // clear previous Host header to avoid conflicts.
|
||||
|
||||
var redirect_url = resolve_url(headers.location, uri);
|
||||
debug('Redirecting to ' + redirect_url.toString());
|
||||
return self.send_request(++count, method, redirect_url.toString(), config, post_data, out, callback);
|
||||
} else if (config.follow_max > 0) {
|
||||
return done(new Error('Max redirects reached. Possible loop in: ' + headers.location));
|
||||
}
|
||||
}
|
||||
|
||||
// if auth is requested and credentials were not passed, resend request, provided we have user/pass.
|
||||
if (resp.statusCode == 401 && headers['www-authenticate'] && config.credentials) {
|
||||
if (!config.headers['authorization']) { // only if authentication hasn't been sent
|
||||
var auth_header = auth.header(headers['www-authenticate'], config.credentials, request_opts);
|
||||
|
||||
if (auth_header) {
|
||||
config.headers['authorization'] = auth_header;
|
||||
return self.send_request(count, method, uri, config, post_data, out, callback);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ok, so we got a valid (non-redirect & authorized) response. let's notify the stream guys.
|
||||
out.emit('header', resp.statusCode, headers);
|
||||
out.emit('headers', headers);
|
||||
|
||||
var pipeline = [],
|
||||
mime = parse_content_type(headers['content-type']),
|
||||
text_response = mime.type && (mime.type.indexOf('text/') != -1 || !!mime.type.match(/(\/|\+)(xml|json)$/));
|
||||
|
||||
// To start, if our body is compressed and we're able to inflate it, do it.
|
||||
if (headers['content-encoding'] && decompressors[headers['content-encoding']]) {
|
||||
|
||||
var decompressor = decompressors[headers['content-encoding']]();
|
||||
|
||||
// make sure we catch errors triggered by the decompressor.
|
||||
decompressor.on('error', had_error);
|
||||
pipeline.push(decompressor);
|
||||
}
|
||||
|
||||
// If parse is enabled and we have a parser for it, then go for it.
|
||||
if (config.parser && parsers[mime.type]) {
|
||||
|
||||
// If a specific parser was requested, make sure we don't parse other types.
|
||||
var parser_name = config.parser.toString().toLowerCase();
|
||||
if (['xml', 'json'].indexOf(parser_name) == -1 || parsers[mime.type].name == parser_name) {
|
||||
|
||||
// OK, so either we're parsing all content types or the one requested matches.
|
||||
out.parser = parsers[mime.type].name;
|
||||
pipeline.push(parsers[mime.type].fn());
|
||||
|
||||
// Set objectMode on out stream to improve performance.
|
||||
out._writableState.objectMode = true;
|
||||
out._readableState.objectMode = true;
|
||||
}
|
||||
|
||||
// If we're not parsing, and unless decoding was disabled, we'll try
|
||||
// decoding non UTF-8 bodies to UTF-8, using the iconv-lite library.
|
||||
} else if (text_response && config.decode_response && mime.charset) {
|
||||
pipeline.push(decoder(mime.charset));
|
||||
}
|
||||
|
||||
// And `out` is the stream we finally push the decoded/parsed output to.
|
||||
pipeline.push(out);
|
||||
|
||||
// Now, release the kraken!
|
||||
pump_streams([resp].concat(pipeline), function(err) {
|
||||
if (err) debug(err)
|
||||
|
||||
// on node v8.x, if an error occurs on the receiving end,
|
||||
// then we want to abort the request to avoid having dangling sockets
|
||||
if (err && err.message == 'write after end') request.destroy();
|
||||
});
|
||||
|
||||
// If the user has requested an output file, pipe the output stream to it.
|
||||
// In stream mode, we will still get the response stream to play with.
|
||||
if (config.output && resp.statusCode == 200) {
|
||||
|
||||
// for some reason, simply piping resp to the writable stream doesn't
|
||||
// work all the time (stream gets cut in the middle with no warning).
|
||||
// so we'll manually need to do the readable/write(chunk) trick.
|
||||
var file = fs.createWriteStream(config.output);
|
||||
file.on('error', had_error);
|
||||
|
||||
out.on('end', function() {
|
||||
if (file.writable) file.end();
|
||||
});
|
||||
|
||||
file.on('close', function() {
|
||||
delete out.file;
|
||||
})
|
||||
|
||||
out.on('readable', function() {
|
||||
var chunk;
|
||||
while ((chunk = this.read()) !== null) {
|
||||
if (file.writable) file.write(chunk);
|
||||
|
||||
// if callback was requested, also push it to resp.body
|
||||
if (resp.body) resp.body.push(chunk);
|
||||
}
|
||||
})
|
||||
|
||||
out.file = file;
|
||||
}
|
||||
|
||||
// Only aggregate the full body if a callback was requested.
|
||||
if (callback) {
|
||||
resp.raw = [];
|
||||
resp.body = [];
|
||||
resp.bytes = 0;
|
||||
|
||||
// Gather and count the amount of (raw) bytes using a PassThrough stream.
|
||||
var clean_pipe = new stream.PassThrough();
|
||||
|
||||
clean_pipe.on('readable', function() {
|
||||
var chunk;
|
||||
while ((chunk = this.read()) != null) {
|
||||
resp.bytes += chunk.length;
|
||||
resp.raw.push(chunk);
|
||||
}
|
||||
})
|
||||
|
||||
pump_streams([resp, clean_pipe], function(err) {
|
||||
if (err) debug(err);
|
||||
});
|
||||
|
||||
// Listen on the 'readable' event to aggregate the chunks, but only if
|
||||
// file output wasn't requested. Otherwise we'd have two stream readers.
|
||||
if (!config.output || resp.statusCode != 200) {
|
||||
out.on('readable', function() {
|
||||
var chunk;
|
||||
while ((chunk = this.read()) !== null) {
|
||||
// We're either pushing buffers or objects, never strings.
|
||||
if (typeof chunk == 'string') chunk = Buffer.from(chunk);
|
||||
|
||||
// Push all chunks to resp.body. We'll bind them in resp.end().
|
||||
resp.body.push(chunk);
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// And set the .body property once all data is in.
|
||||
out.on('end', function() {
|
||||
if (resp.body) { // callback mode
|
||||
|
||||
// we want to be able to access the raw data later, so keep a reference.
|
||||
resp.raw = Buffer.concat(resp.raw);
|
||||
|
||||
// if parse was successful, we should have an array with one object
|
||||
if (resp.body[0] !== undefined && !Buffer.isBuffer(resp.body[0])) {
|
||||
|
||||
// that's our body right there.
|
||||
resp.body = resp.body[0];
|
||||
|
||||
// set the parser property on our response. we may want to check.
|
||||
if (out.parser) resp.parser = out.parser;
|
||||
|
||||
} else { // we got one or several buffers. string or binary.
|
||||
resp.body = Buffer.concat(resp.body);
|
||||
|
||||
// if we're here and parsed is true, it means we tried to but it didn't work.
|
||||
// so given that we got a text response, let's stringify it.
|
||||
if (text_response || out.parser) {
|
||||
resp.body = resp.body.toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if an output file is being written to, make sure the callback
|
||||
// is triggered after all data has been written to it.
|
||||
if (out.file) {
|
||||
out.file.on('close', function() {
|
||||
done(null, resp);
|
||||
})
|
||||
} else { // elvis has left the building.
|
||||
done(null, resp);
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
// out.on('error', function(err) {
|
||||
// had_error(err);
|
||||
// if (err.code == 'ERR_STREAM_DESTROYED' || err.code == 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
// request.abort();
|
||||
// }
|
||||
// })
|
||||
|
||||
}); // end request call
|
||||
|
||||
// unless open_timeout was disabled, set a timeout to abort the request.
|
||||
set_timeout('open', config.open_timeout);
|
||||
|
||||
// handle errors on the request object. things might get bumpy.
|
||||
request.on('error', had_error);
|
||||
|
||||
// make sure timer is cleared if request is aborted (issue #257)
|
||||
request.once('abort', function() {
|
||||
if (timer) clearTimeout(timer);
|
||||
})
|
||||
|
||||
// handle socket 'end' event to ensure we don't get delayed EPIPE errors.
|
||||
request.once('socket', function(socket) {
|
||||
if (socket.connecting) {
|
||||
socket.once('connect', function() {
|
||||
set_timeout('response', config.response_timeout);
|
||||
})
|
||||
} else {
|
||||
set_timeout('response', config.response_timeout);
|
||||
}
|
||||
|
||||
// socket.once('close', function(e) {
|
||||
// console.log('socket closed!', e);
|
||||
// })
|
||||
|
||||
if (!socket.on_socket_end) {
|
||||
socket.on_socket_end = on_socket_end;
|
||||
socket.once('end', function() { process.nextTick(on_socket_end.bind(socket)) });
|
||||
}
|
||||
})
|
||||
|
||||
if (post_data) {
|
||||
if (is_stream(post_data)) {
|
||||
pump_streams([post_data, request], function(err) {
|
||||
if (err) debug(err);
|
||||
});
|
||||
} else {
|
||||
request.write(post_data, config.encoding);
|
||||
request.end();
|
||||
}
|
||||
} else {
|
||||
request.end();
|
||||
}
|
||||
|
||||
out.abort = function() { request.abort() }; // easier access
|
||||
out.request = request;
|
||||
return out;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////
|
||||
// exports
|
||||
|
||||
if (typeof Promise !== 'undefined') {
|
||||
module.exports = function() {
|
||||
var verb, args = [].slice.call(arguments);
|
||||
|
||||
if (args[0].match(/\.|\//)) // first argument looks like a URL
|
||||
verb = (args.length > 2) ? 'post' : 'get';
|
||||
else
|
||||
verb = args.shift();
|
||||
|
||||
if (verb.match(/get|head/i) && args.length == 2)
|
||||
args.splice(1, 0, null); // assume no data if head/get with two args (url, options)
|
||||
|
||||
return new Promise(function(resolve, reject) {
|
||||
module.exports.request(verb, args[0], args[1], args[2], function(err, resp) {
|
||||
return err ? reject(err) : resolve(resp);
|
||||
});
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.version = version;
|
||||
|
||||
module.exports.defaults = function(obj) {
|
||||
for (var key in obj) {
|
||||
var target_key = aliased.options[key] || key;
|
||||
|
||||
if (defaults.hasOwnProperty(target_key) && typeof obj[key] != 'undefined') {
|
||||
if (target_key != 'parse_response' && target_key != 'proxy') {
|
||||
// ensure type matches the original, except for proxy/parse_response that can be null/bool or string
|
||||
var valid_type = defaults[target_key].constructor.name;
|
||||
|
||||
if (obj[key].constructor.name != valid_type)
|
||||
throw new TypeError('Invalid type for ' + key + ', should be ' + valid_type);
|
||||
}
|
||||
defaults[target_key] = obj[key];
|
||||
} else {
|
||||
throw new Error('Invalid property for defaults: ' + target_key);
|
||||
}
|
||||
}
|
||||
|
||||
return defaults;
|
||||
}
|
||||
|
||||
'head get'.split(' ').forEach(function(method) {
|
||||
module.exports[method] = function(uri, options, callback) {
|
||||
return new Needle(method, uri, null, options, callback).start();
|
||||
}
|
||||
})
|
||||
|
||||
'post put patch delete'.split(' ').forEach(function(method) {
|
||||
module.exports[method] = function(uri, data, options, callback) {
|
||||
return new Needle(method, uri, data, options, callback).start();
|
||||
}
|
||||
})
|
||||
|
||||
module.exports.request = function(method, uri, data, opts, callback) {
|
||||
return new Needle(method, uri, data, opts, callback).start();
|
||||
};
|
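// A minimal usage sketch based on the exports defined above; the URL, payload
// and options here are illustrative only, not part of the library:
//
//   var needle = require('needle');
//
//   // callback style, via the generated verb helpers
//   needle.get('https://example.com', { json: true }, function(err, resp, body) {
//     if (err) return console.error(err);
//     console.log(resp.statusCode, body);
//   });
//
//   // promise style, via the exported function (Promise-capable runtimes only)
//   needle('post', 'https://example.com/api', { foo: 'bar' }, { json: true })
//     .then(function(resp) { console.log(resp.body); })
//     .catch(console.error);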
121
node_modules/needle/lib/parsers.js
generated
vendored
121
node_modules/needle/lib/parsers.js
generated
vendored
|
@ -1,121 +0,0 @@
|
|||
//////////////////////////////////////////
|
||||
// Defines mappings between content-type
|
||||
// and the appropriate parsers.
|
||||
//////////////////////////////////////////
|
||||
|
||||
var Transform = require('stream').Transform;
|
||||
var sax = require('sax');
|
||||
|
||||
function parseXML(str, cb) {
|
||||
var obj, current, parser = sax.parser(true, { trim: true, lowercase: true })
|
||||
parser.onerror = parser.onend = done;
|
||||
|
||||
function done(err) {
|
||||
parser.onerror = parser.onend = function() { }
|
||||
cb(err, obj)
|
||||
}
|
||||
|
||||
function newElement(name, attributes) {
|
||||
return {
|
||||
name: name || '',
|
||||
value: '',
|
||||
attributes: attributes || {},
|
||||
children: []
|
||||
}
|
||||
}
|
||||
|
||||
parser.oncdata = parser.ontext = function(t) {
|
||||
if (current) current.value += t
|
||||
}
|
||||
|
||||
parser.onopentag = function(node) {
|
||||
var element = newElement(node.name, node.attributes)
|
||||
if (current) {
|
||||
element.parent = current
|
||||
current.children.push(element)
|
||||
} else { // root object
|
||||
obj = element
|
||||
}
|
||||
|
||||
current = element
|
||||
};
|
||||
|
||||
parser.onclosetag = function() {
|
||||
if (typeof current.parent !== 'undefined') {
|
||||
var just_closed = current
|
||||
current = current.parent
|
||||
delete just_closed.parent
|
||||
}
|
||||
}
|
||||
|
||||
parser.write(str).close()
|
||||
}
|
||||
|
||||
function parserFactory(name, fn) {
|
||||
|
||||
function parser() {
|
||||
var chunks = [],
|
||||
stream = new Transform({ objectMode: true });
|
||||
|
||||
// Buffer all our data
|
||||
stream._transform = function(chunk, encoding, done) {
|
||||
chunks.push(chunk);
|
||||
done();
|
||||
}
|
||||
|
||||
// And call the parser when all is there.
|
||||
stream._flush = function(done) {
|
||||
var self = this,
|
||||
data = Buffer.concat(chunks);
|
||||
|
||||
try {
|
||||
fn(data, function(err, result) {
|
||||
if (err) throw err;
|
||||
self.push(result);
|
||||
});
|
||||
} catch (err) {
|
||||
self.push(data); // just pass the original data
|
||||
} finally {
|
||||
done();
|
||||
}
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
return { fn: parser, name: name };
|
||||
}
|
||||
|
||||
var parsers = {}
|
||||
|
||||
function buildParser(name, types, fn) {
|
||||
var parser = parserFactory(name, fn);
|
||||
types.forEach(function(type) {
|
||||
parsers[type] = parser;
|
||||
})
|
||||
}
|
||||
|
||||
buildParser('json', [
|
||||
'application/json',
|
||||
'text/javascript',
|
||||
'application/vnd.api+json'
|
||||
], function(buffer, cb) {
|
||||
var err, data;
|
||||
try { data = JSON.parse(buffer); } catch (e) { err = e; }
|
||||
cb(err, data);
|
||||
});
|
||||
|
||||
buildParser('xml', [
|
||||
'text/xml',
|
||||
'application/xml',
|
||||
'application/rdf+xml',
|
||||
'application/rss+xml',
|
||||
'application/atom+xml'
|
||||
], function(buffer, cb) {
|
||||
parseXML(buffer.toString(), function(err, obj) {
|
||||
cb(err, obj)
|
||||
})
|
||||
});
|
||||
|
||||
module.exports = parsers;
|
||||
module.exports.use = buildParser;
|
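// Registration sketch: `use` is the exported buildParser above, so extra
// content types can be mapped to a custom parser. The CSV type and the naive
// splitting logic below are illustrative assumptions only:
//
//   var parsers = require('./parsers');
//
//   parsers.use('csv', ['text/csv'], function(buffer, cb) {
//     var rows = buffer.toString().trim().split('\n').map(function(line) {
//       return line.split(',');
//     });
//     cb(null, rows);
//   });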
49
node_modules/needle/lib/querystring.js
generated
vendored
49
node_modules/needle/lib/querystring.js
generated
vendored
|
@ -1,49 +0,0 @@
|
|||
// based on the qs module, but handles null objects as expected
|
||||
// fixes by Tomas Pollak.
|
||||
|
||||
var toString = Object.prototype.toString;
|
||||
|
||||
function stringify(obj, prefix) {
|
||||
if (prefix && (obj === null || typeof obj == 'undefined')) {
|
||||
return prefix + '=';
|
||||
} else if (toString.call(obj) == '[object Array]') {
|
||||
return stringifyArray(obj, prefix);
|
||||
} else if (toString.call(obj) == '[object Object]') {
|
||||
return stringifyObject(obj, prefix);
|
||||
} else if (toString.call(obj) == '[object Date]') {
|
||||
return obj.toISOString();
|
||||
} else if (prefix) { // string inside array or hash
|
||||
return prefix + '=' + encodeURIComponent(String(obj));
|
||||
} else if (String(obj).indexOf('=') !== -1) { // string with equal sign
|
||||
return String(obj);
|
||||
} else {
|
||||
throw new TypeError('Cannot build a querystring out of: ' + obj);
|
||||
}
|
||||
};
|
||||
|
||||
function stringifyArray(arr, prefix) {
|
||||
var ret = [];
|
||||
|
||||
for (var i = 0, len = arr.length; i < len; i++) {
|
||||
if (prefix)
|
||||
ret.push(stringify(arr[i], prefix + '[]'));
|
||||
else
|
||||
ret.push(stringify(arr[i]));
|
||||
}
|
||||
|
||||
return ret.join('&');
|
||||
}
|
||||
|
||||
function stringifyObject(obj, prefix) {
|
||||
var ret = [];
|
||||
|
||||
Object.keys(obj).forEach(function(key) {
|
||||
ret.push(stringify(obj[key], prefix
|
||||
? prefix + '[' + encodeURIComponent(key) + ']'
|
||||
: encodeURIComponent(key)));
|
||||
})
|
||||
|
||||
return ret.join('&');
|
||||
}
|
||||
|
||||
exports.build = stringify;
|
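// Output sketch for the builder above (values are illustrative):
//
//   var qs = require('./querystring');
//
//   qs.build({ q: 'nodejs', page: 2 });          // => 'q=nodejs&page=2'
//   qs.build({ tags: ['a', 'b'], empty: null }); // => 'tags[]=a&tags[]=b&empty='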
19
node_modules/needle/license.txt
generated
vendored
19
node_modules/needle/license.txt
generated
vendored
|
@ -1,19 +0,0 @@
|
|||
Copyright (c) Fork, Ltd.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
395
node_modules/needle/node_modules/debug/CHANGELOG.md
generated
vendored
395
node_modules/needle/node_modules/debug/CHANGELOG.md
generated
vendored
|
@ -1,395 +0,0 @@
|
|||
|
||||
3.1.0 / 2017-09-26
|
||||
==================
|
||||
|
||||
* Add `DEBUG_HIDE_DATE` env var (#486)
|
||||
* Remove ReDoS regexp in %o formatter (#504)
|
||||
* Remove "component" from package.json
|
||||
* Remove `component.json`
|
||||
* Ignore package-lock.json
|
||||
* Examples: fix colors printout
|
||||
* Fix: browser detection
|
||||
* Fix: spelling mistake (#496, @EdwardBetts)
|
||||
|
||||
3.0.1 / 2017-08-24
|
||||
==================
|
||||
|
||||
* Fix: Disable colors in Edge and Internet Explorer (#489)
|
||||
|
||||
3.0.0 / 2017-08-08
|
||||
==================
|
||||
|
||||
* Breaking: Remove DEBUG_FD (#406)
|
||||
* Breaking: Use `Date#toISOString()` instead of `Date#toUTCString()` when output is not a TTY (#418)
|
||||
* Breaking: Make millisecond timer namespace specific and allow 'always enabled' output (#408)
|
||||
* Addition: document `enabled` flag (#465)
|
||||
* Addition: add 256 colors mode (#481)
|
||||
* Addition: `enabled()` updates existing debug instances, add `destroy()` function (#440)
|
||||
* Update: component: update "ms" to v2.0.0
|
||||
* Update: separate the Node and Browser tests in Travis-CI
|
||||
* Update: refactor Readme, fixed documentation, added "Namespace Colors" section, redid screenshots
|
||||
* Update: separate Node.js and web browser examples for organization
|
||||
* Update: update "browserify" to v14.4.0
|
||||
* Fix: fix Readme typo (#473)
|
||||
|
||||
2.6.9 / 2017-09-22
|
||||
==================
|
||||
|
||||
* remove ReDoS regexp in %o formatter (#504)
|
||||
|
||||
2.6.8 / 2017-05-18
|
||||
==================
|
||||
|
||||
* Fix: Check for undefined on browser globals (#462, @marbemac)
|
||||
|
||||
2.6.7 / 2017-05-16
|
||||
==================
|
||||
|
||||
* Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom)
|
||||
* Fix: Inline extend function in node implementation (#452, @dougwilson)
|
||||
* Docs: Fix typo (#455, @msasad)
|
||||
|
||||
2.6.5 / 2017-04-27
|
||||
==================
|
||||
|
||||
* Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek)
|
||||
* Misc: clean up browser reference checks (#447, @thebigredgeek)
|
||||
* Misc: add npm-debug.log to .gitignore (@thebigredgeek)
|
||||
|
||||
|
||||
2.6.4 / 2017-04-20
|
||||
==================
|
||||
|
||||
* Fix: bug that would occur if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo)
|
||||
* Chore: ignore bower.json in npm installations. (#437, @joaovieira)
|
||||
* Misc: update "ms" to v0.7.3 (@tootallnate)
|
||||
|
||||
2.6.3 / 2017-03-13
|
||||
==================
|
||||
|
||||
* Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts)
|
||||
* Docs: Changelog fix (@thebigredgeek)
|
||||
|
||||
2.6.2 / 2017-03-10
|
||||
==================
|
||||
|
||||
* Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin)
|
||||
* Docs: Add backers and sponsors from Open Collective (#422, @piamancini)
|
||||
* Docs: Add Slackin invite badge (@tootallnate)
|
||||
|
||||
2.6.1 / 2017-02-10
|
||||
==================
|
||||
|
||||
* Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error
|
||||
* Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0)
|
||||
* Fix: IE8 "Expected identifier" error (#414, @vgoma)
|
||||
* Fix: Namespaces would not disable once enabled (#409, @musikov)
|
||||
|
||||
2.6.0 / 2016-12-28
|
||||
==================
|
||||
|
||||
* Fix: added better null pointer checks for browser useColors (@thebigredgeek)
|
||||
* Improvement: removed explicit `window.debug` export (#404, @tootallnate)
|
||||
* Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate)
|
||||
|
||||
2.5.2 / 2016-12-25
|
||||
==================
|
||||
|
||||
* Fix: reference error on window within webworkers (#393, @KlausTrainer)
|
||||
* Docs: fixed README typo (#391, @lurch)
|
||||
* Docs: added notice about v3 api discussion (@thebigredgeek)
|
||||
|
||||
2.5.1 / 2016-12-20
|
||||
==================
|
||||
|
||||
* Fix: babel-core compatibility
|
||||
|
||||
2.5.0 / 2016-12-20
|
||||
==================
|
||||
|
||||
* Fix: wrong reference in bower file (@thebigredgeek)
|
||||
* Fix: webworker compatibility (@thebigredgeek)
|
||||
* Fix: output formatting issue (#388, @kribblo)
|
||||
* Fix: babel-loader compatibility (#383, @escwald)
|
||||
* Misc: removed built asset from repo and publications (@thebigredgeek)
|
||||
* Misc: moved source files to /src (#378, @yamikuronue)
|
||||
* Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue)
|
||||
* Test: coveralls integration (#378, @yamikuronue)
|
||||
* Docs: simplified language in the opening paragraph (#373, @yamikuronue)
|
||||
|
||||
2.4.5 / 2016-12-17
|
||||
==================
|
||||
|
||||
* Fix: `navigator` undefined in Rhino (#376, @jochenberger)
|
||||
* Fix: custom log function (#379, @hsiliev)
|
||||
* Improvement: bit of cleanup + linting fixes (@thebigredgeek)
|
||||
* Improvement: rm non-maintainted `dist/` dir (#375, @freewil)
|
||||
* Docs: simplified language in the opening paragraph. (#373, @yamikuronue)
|
||||
|
||||
2.4.4 / 2016-12-14
|
||||
==================
|
||||
|
||||
* Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts)
|
||||
|
||||
2.4.3 / 2016-12-14
|
||||
==================
|
||||
|
||||
* Fix: navigation.userAgent error for react native (#364, @escwald)
|
||||
|
||||
2.4.2 / 2016-12-14
|
||||
==================
|
||||
|
||||
* Fix: browser colors (#367, @tootallnate)
|
||||
* Misc: travis ci integration (@thebigredgeek)
|
||||
* Misc: added linting and testing boilerplate with sanity check (@thebigredgeek)
|
||||
|
||||
2.4.1 / 2016-12-13
|
||||
==================
|
||||
|
||||
* Fix: typo that broke the package (#356)
|
||||
|
||||
2.4.0 / 2016-12-13
|
||||
==================
|
||||
|
||||
* Fix: bower.json references unbuilt src entry point (#342, @justmatt)
|
||||
* Fix: revert "handle regex special characters" (@tootallnate)
|
||||
* Feature: configurable `util.inspect()` options for NodeJS (#327, @tootallnate)
|
||||
* Feature: `%O` (big O) pretty-prints objects (#322, @tootallnate)
|
||||
* Improvement: allow colors in workers (#335, @botverse)
|
||||
* Improvement: use same color for same namespace. (#338, @lchenay)
|
||||
|
||||
2.3.3 / 2016-11-09
|
||||
==================
|
||||
|
||||
* Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne)
|
||||
* Fix: Returning `localStorage` saved values (#331, Levi Thomason)
|
||||
* Improvement: Don't create an empty object when no `process` (Nathan Rajlich)
|
||||
|
||||
2.3.2 / 2016-11-09
|
||||
==================
|
||||
|
||||
* Fix: be super-safe in index.js as well (@TooTallNate)
|
||||
* Fix: should check whether process exists (Tom Newby)
|
||||
|
||||
2.3.1 / 2016-11-09
|
||||
==================
|
||||
|
||||
* Fix: Added electron compatibility (#324, @paulcbetts)
|
||||
* Improvement: Added performance optimizations (@tootallnate)
|
||||
* Readme: Corrected PowerShell environment variable example (#252, @gimre)
|
||||
* Misc: Removed yarn lock file from source control (#321, @fengmk2)
|
||||
|
||||
2.3.0 / 2016-11-07
|
||||
==================
|
||||
|
||||
* Fix: Consistent placement of ms diff at end of output (#215, @gorangajic)
|
||||
* Fix: Escaping of regex special characters in namespace strings (#250, @zacronos)
|
||||
* Fix: Fixed bug causing crash on react-native (#282, @vkarpov15)
|
||||
* Feature: Enabled ES6+ compatible import via default export (#212 @bucaran)
|
||||
* Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom)
|
||||
* Package: Update "ms" to 0.7.2 (#315, @DevSide)
|
||||
* Package: removed superfluous version property from bower.json (#207 @kkirsche)
|
||||
* Readme: fix USE_COLORS to DEBUG_COLORS
|
||||
* Readme: Doc fixes for format string sugar (#269, @mlucool)
|
||||
* Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0)
|
||||
* Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable)
|
||||
* Readme: better docs for browser support (#224, @matthewmueller)
|
||||
* Tooling: Added yarn integration for development (#317, @thebigredgeek)
|
||||
* Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek)
|
||||
* Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman)
|
||||
* Misc: Updated contributors (@thebigredgeek)
|
||||
|
||||
2.2.0 / 2015-05-09
|
||||
==================
|
||||
|
||||
* package: update "ms" to v0.7.1 (#202, @dougwilson)
|
||||
* README: add logging to file example (#193, @DanielOchoa)
|
||||
* README: fixed a typo (#191, @amir-s)
|
||||
* browser: expose `storage` (#190, @stephenmathieson)
|
||||
* Makefile: add a `distclean` target (#189, @stephenmathieson)
|
||||
|
||||
2.1.3 / 2015-03-13
|
||||
==================
|
||||
|
||||
* Updated stdout/stderr example (#186)
|
||||
* Updated example/stdout.js to match debug current behaviour
|
||||
* Renamed example/stderr.js to stdout.js
|
||||
* Update Readme.md (#184)
|
||||
* replace high intensity foreground color for bold (#182, #183)
|
||||
|
||||
2.1.2 / 2015-03-01
|
||||
==================
|
||||
|
||||
* dist: recompile
|
||||
* update "ms" to v0.7.0
|
||||
* package: update "browserify" to v9.0.3
|
||||
* component: fix "ms.js" repo location
|
||||
* changed bower package name
|
||||
* updated documentation about using debug in a browser
|
||||
* fix: security error on safari (#167, #168, @yields)
|
||||
|
||||
2.1.1 / 2014-12-29
|
||||
==================
|
||||
|
||||
* browser: use `typeof` to check for `console` existence
|
||||
* browser: check for `console.log` truthiness (fix IE 8/9)
|
||||
* browser: add support for Chrome apps
|
||||
* Readme: added Windows usage remarks
|
||||
* Add `bower.json` to properly support bower install
|
||||
|
||||
2.1.0 / 2014-10-15
|
||||
==================
|
||||
|
||||
* node: implement `DEBUG_FD` env variable support
|
||||
* package: update "browserify" to v6.1.0
|
||||
* package: add "license" field to package.json (#135, @panuhorsmalahti)
|
||||
|
||||
2.0.0 / 2014-09-01
|
||||
==================
|
||||
|
||||
* package: update "browserify" to v5.11.0
|
||||
* node: use stderr rather than stdout for logging (#29, @stephenmathieson)
|
||||
|
||||
1.0.4 / 2014-07-15
|
||||
==================
|
||||
|
||||
* dist: recompile
|
||||
* example: remove `console.info()` log usage
|
||||
* example: add "Content-Type" UTF-8 header to browser example
|
||||
* browser: place %c marker after the space character
|
||||
* browser: reset the "content" color via `color: inherit`
|
||||
* browser: add colors support for Firefox >= v31
|
||||
* debug: prefer an instance `log()` function over the global one (#119)
|
||||
* Readme: update documentation about styled console logs for FF v31 (#116, @wryk)
|
||||
|
||||
1.0.3 / 2014-07-09
|
||||
==================
|
||||
|
||||
* Add support for multiple wildcards in namespaces (#122, @seegno)
|
||||
* browser: fix lint
|
||||
|
||||
1.0.2 / 2014-06-10
|
||||
==================
|
||||
|
||||
* browser: update color palette (#113, @gscottolson)
|
||||
* common: make console logging function configurable (#108, @timoxley)
|
||||
* node: fix %o colors on old node <= 0.8.x
|
||||
* Makefile: find node path using shell/which (#109, @timoxley)
|
||||
|
||||
1.0.1 / 2014-06-06
|
||||
==================
|
||||
|
||||
* browser: use `removeItem()` to clear localStorage
|
||||
* browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777)
|
||||
* package: add "contributors" section
|
||||
* node: fix comment typo
|
||||
* README: list authors
|
||||
|
||||
1.0.0 / 2014-06-04
|
||||
==================
|
||||
|
||||
* make ms diff be global, not be scope
|
||||
* debug: ignore empty strings in enable()
|
||||
* node: make DEBUG_COLORS able to disable coloring
|
||||
* *: export the `colors` array
|
||||
* npmignore: don't publish the `dist` dir
|
||||
* Makefile: refactor to use browserify
|
||||
* package: add "browserify" as a dev dependency
|
||||
* Readme: add Web Inspector Colors section
|
||||
* node: reset terminal color for the debug content
|
||||
* node: map "%o" to `util.inspect()`
|
||||
* browser: map "%j" to `JSON.stringify()`
|
||||
* debug: add custom "formatters"
|
||||
* debug: use "ms" module for humanizing the diff
|
||||
* Readme: add "bash" syntax highlighting
|
||||
* browser: add Firebug color support
|
||||
* browser: add colors for WebKit browsers
|
||||
* node: apply log to `console`
|
||||
* rewrite: abstract common logic for Node & browsers
|
||||
* add .jshintrc file
|
||||
|
||||
0.8.1 / 2014-04-14
|
||||
==================
|
||||
|
||||
* package: re-add the "component" section
|
||||
|
||||
0.8.0 / 2014-03-30
|
||||
==================
|
||||
|
||||
* add `enable()` method for nodejs. Closes #27
|
||||
* change from stderr to stdout
|
||||
* remove unnecessary index.js file
|
||||
|
||||
0.7.4 / 2013-11-13
|
||||
==================
|
||||
|
||||
* remove "browserify" key from package.json (fixes something in browserify)
|
||||
|
||||
0.7.3 / 2013-10-30
|
||||
==================
|
||||
|
||||
* fix: catch localStorage security error when cookies are blocked (Chrome)
|
||||
* add debug(err) support. Closes #46
|
||||
* add .browser prop to package.json. Closes #42
|
||||
|
||||
0.7.2 / 2013-02-06
|
||||
==================
|
||||
|
||||
* fix package.json
|
||||
* fix: Mobile Safari (private mode) is broken with debug
|
||||
* fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript
|
||||
|
||||
0.7.1 / 2013-02-05
|
||||
==================
|
||||
|
||||
* add repository URL to package.json
|
||||
* add DEBUG_COLORED to force colored output
|
||||
* add browserify support
|
||||
* fix component. Closes #24
|
||||
|
||||
0.7.0 / 2012-05-04
|
||||
==================
|
||||
|
||||
* Added .component to package.json
|
||||
* Added debug.component.js build
|
||||
|
||||
0.6.0 / 2012-03-16
|
||||
==================
|
||||
|
||||
* Added support for "-" prefix in DEBUG [Vinay Pulim]
|
||||
* Added `.enabled` flag to the node version [TooTallNate]
|
||||
|
||||
0.5.0 / 2012-02-02
|
||||
==================
|
||||
|
||||
* Added: humanize diffs. Closes #8
|
||||
* Added `debug.disable()` to the CS variant
|
||||
* Removed padding. Closes #10
|
||||
* Fixed: persist client-side variant again. Closes #9
|
||||
|
||||
0.4.0 / 2012-02-01
|
||||
==================
|
||||
|
||||
* Added browser variant support for older browsers [TooTallNate]
|
||||
* Added `debug.enable('project:*')` to browser variant [TooTallNate]
|
||||
* Added padding to diff (moved it to the right)
|
||||
|
||||
0.3.0 / 2012-01-26
|
||||
==================
|
||||
|
||||
* Added millisecond diff when isatty, otherwise UTC string
|
||||
|
||||
0.2.0 / 2012-01-22
|
||||
==================
|
||||
|
||||
* Added wildcard support
|
||||
|
||||
0.1.0 / 2011-12-02
|
||||
==================
|
||||
|
||||
* Added: remove colors unless stderr isatty [TooTallNate]
|
||||
|
||||
0.0.1 / 2010-01-03
|
||||
==================
|
||||
|
||||
* Initial release
|
19
node_modules/needle/node_modules/debug/LICENSE
generated
vendored
19
node_modules/needle/node_modules/debug/LICENSE
generated
vendored
|
@ -1,19 +0,0 @@
|
|||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 TJ Holowaychuk <tj@vision-media.ca>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
|
||||
and associated documentation files (the 'Software'), to deal in the Software without restriction,
|
||||
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
437
node_modules/needle/node_modules/debug/README.md
generated
vendored
437
node_modules/needle/node_modules/debug/README.md
generated
vendored
|
@ -1,437 +0,0 @@
|
|||
# debug
|
||||
[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers)
|
||||
[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors)
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png">
|
||||
|
||||
A tiny JavaScript debugging utility modelled after Node.js core's debugging
|
||||
technique. Works in Node.js and web browsers.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
$ npm install debug
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole.
|
||||
|
||||
Example [_app.js_](./examples/node/app.js):
|
||||
|
||||
```js
|
||||
var debug = require('debug')('http')
|
||||
, http = require('http')
|
||||
, name = 'My App';
|
||||
|
||||
// fake app
|
||||
|
||||
debug('booting %o', name);
|
||||
|
||||
http.createServer(function(req, res){
|
||||
debug(req.method + ' ' + req.url);
|
||||
res.end('hello\n');
|
||||
}).listen(3000, function(){
|
||||
debug('listening');
|
||||
});
|
||||
|
||||
// fake worker of some kind
|
||||
|
||||
require('./worker');
|
||||
```
|
||||
|
||||
Example [_worker.js_](./examples/node/worker.js):
|
||||
|
||||
```js
|
||||
var a = require('debug')('worker:a')
|
||||
, b = require('debug')('worker:b');
|
||||
|
||||
function work() {
|
||||
a('doing lots of uninteresting work');
|
||||
setTimeout(work, Math.random() * 1000);
|
||||
}
|
||||
|
||||
work();
|
||||
|
||||
function workb() {
|
||||
b('doing some work');
|
||||
setTimeout(workb, Math.random() * 2000);
|
||||
}
|
||||
|
||||
workb();
|
||||
```
|
||||
|
||||
The `DEBUG` environment variable is then used to enable these based on space or
|
||||
comma-delimited names.
|
||||
|
||||
Here are some examples:
|
||||
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 04 pm" src="https://user-images.githubusercontent.com/71256/29091703-a6302cdc-7c38-11e7-8304-7c0b3bc600cd.png">
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 38 pm" src="https://user-images.githubusercontent.com/71256/29091700-a62a6888-7c38-11e7-800b-db911291ca2b.png">
|
||||
<img width="647" alt="screen shot 2017-08-08 at 12 53 25 pm" src="https://user-images.githubusercontent.com/71256/29091701-a62ea114-7c38-11e7-826a-2692bedca740.png">
|
||||
|
||||
#### Windows command prompt notes
|
||||
|
||||
##### CMD
|
||||
|
||||
On Windows the environment variable is set using the `set` command.
|
||||
|
||||
```cmd
|
||||
set DEBUG=*,-not_this
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```cmd
|
||||
set DEBUG=* & node app.js
|
||||
```
|
||||
|
||||
##### PowerShell (VS Code default)
|
||||
|
||||
PowerShell uses different syntax to set environment variables.
|
||||
|
||||
```cmd
|
||||
$env:DEBUG = "*,-not_this"
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```cmd
|
||||
$env:DEBUG='app';node app.js
|
||||
```
|
||||
|
||||
Then, run the program to be debugged as usual.
|
||||
|
||||
npm script example:
|
||||
```js
|
||||
"windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js",
|
||||
```
|
||||
|
||||
## Namespace Colors
|
||||
|
||||
Every debug instance has a color generated for it based on its namespace name.
|
||||
This helps when visually parsing the debug output to identify which debug instance
|
||||
a debug line belongs to.
|
||||
|
||||
#### Node.js
|
||||
|
||||
In Node.js, colors are enabled when stderr is a TTY. You also _should_ install
|
||||
the [`supports-color`](https://npmjs.org/supports-color) module alongside debug,
|
||||
otherwise debug will only use a small handful of basic colors.
|
||||
|
||||
<img width="521" src="https://user-images.githubusercontent.com/71256/29092181-47f6a9e6-7c3a-11e7-9a14-1928d8a711cd.png">
|
||||
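For example, both can be installed side by side (a sketch; adapt to your own project):

```bash
$ npm install debug supports-color
```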
|
||||
#### Web Browser
|
||||
|
||||
Colors are also enabled on "Web Inspectors" that understand the `%c` formatting
|
||||
option. These are WebKit web inspectors, Firefox ([since version
|
||||
31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/))
|
||||
and the Firebug plugin for Firefox (any version).
|
||||
|
||||
<img width="524" src="https://user-images.githubusercontent.com/71256/29092033-b65f9f2e-7c39-11e7-8e32-f6f0d8e865c1.png">
|
||||
|
||||
|
||||
## Millisecond diff
|
||||
|
||||
When actively developing an application it can be useful to see how much time was spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well; the "+NNNms" will show you how much time was spent between calls.
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091486-fa38524c-7c37-11e7-895f-e7ec8e1039b6.png">
|
||||
|
||||
When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below:
|
||||
|
||||
<img width="647" src="https://user-images.githubusercontent.com/71256/29091956-6bd78372-7c39-11e7-8c55-c948396d6edd.png">
|
||||
|
||||
|
||||
## Conventions
|
||||
|
||||
If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debugger you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output.
|
||||
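As a sketch of that convention (the `mylib` name and its features are made up here):

```js
const debugCore = require('debug')('mylib:core');
const debugHttp = require('debug')('mylib:http');

debugCore('booting');            // enabled with DEBUG=mylib:* or DEBUG=mylib:core
debugHttp('GET %s', '/users');   // enabled with DEBUG=mylib:* or DEBUG=mylib:http
```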
|
||||
## Wildcards
|
||||
|
||||
The `*` character may be used as a wildcard. Suppose for example your library has
|
||||
debuggers named "connect:bodyParser", "connect:compress", "connect:session",
|
||||
instead of listing all three with
|
||||
`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do
|
||||
`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.
|
||||
|
||||
You can also exclude specific debuggers by prefixing them with a "-" character.
|
||||
For example, `DEBUG=*,-connect:*` would include all debuggers except those
|
||||
starting with "connect:".
|
||||
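For example, to enable every `connect:` debugger except the session one (a sketch using the hypothetical namespaces above):

```bash
$ DEBUG=connect:*,-connect:session node app.js
```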
|
||||
## Environment Variables
|
||||
|
||||
When running through Node.js, you can set a few environment variables that will
|
||||
change the behavior of the debug logging:
|
||||
|
||||
| Name | Purpose |
|
||||
|-----------|-------------------------------------------------|
|
||||
| `DEBUG` | Enables/disables specific debugging namespaces. |
|
||||
| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). |
|
||||
| `DEBUG_COLORS`| Whether or not to use colors in the debug output. |
|
||||
| `DEBUG_DEPTH` | Object inspection depth. |
|
||||
| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. |
|
||||
|
||||
|
||||
__Note:__ The environment variables beginning with `DEBUG_` end up being
|
||||
converted into an Options object that gets used with `%o`/`%O` formatters.
|
||||
See the Node.js documentation for
|
||||
[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options)
|
||||
for the complete list.
|
||||
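A combined sketch (the `app` namespace is illustrative):

```bash
$ DEBUG=app DEBUG_DEPTH=5 DEBUG_COLORS=1 node app.js
```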
|
||||
## Formatters
|
||||
|
||||
Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting.
|
||||
Below are the officially supported formatters:
|
||||
|
||||
| Formatter | Representation |
|
||||
|-----------|----------------|
|
||||
| `%O` | Pretty-print an Object on multiple lines. |
|
||||
| `%o` | Pretty-print an Object all on a single line. |
|
||||
| `%s` | String. |
|
||||
| `%d` | Number (both integer and float). |
|
||||
| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. |
|
||||
| `%%` | Single percent sign ('%'). This does not consume an argument. |
|
||||
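For instance, mixing the formatters listed above (a sketch):

```js
const debug = require('debug')('app');

debug('user %o logged in after %d ms', { id: 42, name: 'ada' }, 150);
debug('raw payload: %j', { nested: { ok: true } });
debug('100%% complete'); // prints a literal percent sign
```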
|
||||
|
||||
### Custom formatters
|
||||
|
||||
You can add custom formatters by extending the `debug.formatters` object.
|
||||
For example, if you wanted to add support for rendering a Buffer as hex with
|
||||
`%h`, you could do something like:
|
||||
|
||||
```js
|
||||
const createDebug = require('debug')
|
||||
createDebug.formatters.h = (v) => {
|
||||
return v.toString('hex')
|
||||
}
|
||||
|
||||
// …elsewhere
|
||||
const debug = createDebug('foo')
|
||||
debug('this is hex: %h', new Buffer('hello world'))
|
||||
// foo this is hex: 68656c6c6f20776f726c6421 +0ms
|
||||
```
|
||||
|
||||
|
||||
## Browser Support
|
||||
|
||||
You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify),
|
||||
or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest),
|
||||
if you don't want to build it yourself.
|
||||
|
||||
Debug's enable state is currently persisted by `localStorage`.
|
||||
Consider the situation shown below where you have `worker:a` and `worker:b`,
|
||||
and wish to debug both. You can enable this using `localStorage.debug`:
|
||||
|
||||
```js
|
||||
localStorage.debug = 'worker:*'
|
||||
```
|
||||
|
||||
And then refresh the page.
|
||||
|
||||
```js
|
||||
a = debug('worker:a');
|
||||
b = debug('worker:b');
|
||||
|
||||
setInterval(function(){
|
||||
a('doing some work');
|
||||
}, 1000);
|
||||
|
||||
setInterval(function(){
|
||||
b('doing some work');
|
||||
}, 1200);
|
||||
```
|
||||
|
||||
|
||||
## Output streams
|
||||
|
||||
By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method:
|
||||
|
||||
Example [_stdout.js_](./examples/node/stdout.js):
|
||||
|
||||
```js
|
||||
var debug = require('debug');
|
||||
var error = debug('app:error');
|
||||
|
||||
// by default stderr is used
|
||||
error('goes to stderr!');
|
||||
|
||||
var log = debug('app:log');
|
||||
// set this namespace to log via console.log
|
||||
log.log = console.log.bind(console); // don't forget to bind to console!
|
||||
log('goes to stdout');
|
||||
error('still goes to stderr!');
|
||||
|
||||
// set all output to go via console.info
|
||||
// overrides all per-namespace log settings
|
||||
debug.log = console.info.bind(console);
|
||||
error('now goes to stdout via console.info');
|
||||
log('still goes to stdout, but via console.info now');
|
||||
```
|
||||
|
||||
## Extend
|
||||
You can simply extend a debugger
|
||||
```js
|
||||
const log = require('debug')('auth');
|
||||
|
||||
// creates a new debug instance with an extended namespace
|
||||
const logSign = log.extend('sign');
|
||||
const logLogin = log.extend('login');
|
||||
|
||||
log('hello'); // auth hello
|
||||
logSign('hello'); //auth:sign hello
|
||||
logLogin('hello'); //auth:login hello
|
||||
```
|
||||
|
||||
## Set dynamically
|
||||
|
||||
You can also enable debug dynamically by calling the `enable()` method:
|
||||
|
||||
```js
|
||||
let debug = require('debug');
|
||||
|
||||
console.log(1, debug.enabled('test'));
|
||||
|
||||
debug.enable('test');
|
||||
console.log(2, debug.enabled('test'));
|
||||
|
||||
debug.disable();
|
||||
console.log(3, debug.enabled('test'));
|
||||
|
||||
```
|
||||
|
||||
This prints:
|
||||
```
|
||||
1 false
|
||||
2 true
|
||||
3 false
|
||||
```
|
||||
|
||||
Usage:
|
||||
`enable(namespaces)`
|
||||
`namespaces` can include modes separated by a colon and wildcards.
|
||||
|
||||
Note that calling `enable()` completely overrides the previously set DEBUG variable:
|
||||
|
||||
```
|
||||
$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))'
|
||||
=> false
|
||||
```
|
||||
|
||||
## Checking whether a debug target is enabled
|
||||
|
||||
After you've created a debug instance, you can determine whether or not it is
|
||||
enabled by checking the `enabled` property:
|
||||
|
||||
```javascript
|
||||
const debug = require('debug')('http');
|
||||
|
||||
if (debug.enabled) {
|
||||
// do stuff...
|
||||
}
|
||||
```
|
||||
|
||||
You can also manually toggle this property to force the debug instance to be
|
||||
enabled or disabled.
|
||||
|
||||
|
||||
## Authors
|
||||
|
||||
- TJ Holowaychuk
|
||||
- Nathan Rajlich
|
||||
- Andrew Rhyne
|
||||
|
||||
## Backers
|
||||
|
||||
Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)]
|
||||
|
||||

<a href="https://opencollective.com/debug/backer/0/website" target="_blank"><img src="https://opencollective.com/debug/backer/0/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/1/website" target="_blank"><img src="https://opencollective.com/debug/backer/1/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/2/website" target="_blank"><img src="https://opencollective.com/debug/backer/2/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/3/website" target="_blank"><img src="https://opencollective.com/debug/backer/3/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/4/website" target="_blank"><img src="https://opencollective.com/debug/backer/4/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/5/website" target="_blank"><img src="https://opencollective.com/debug/backer/5/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/6/website" target="_blank"><img src="https://opencollective.com/debug/backer/6/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/7/website" target="_blank"><img src="https://opencollective.com/debug/backer/7/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/8/website" target="_blank"><img src="https://opencollective.com/debug/backer/8/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/9/website" target="_blank"><img src="https://opencollective.com/debug/backer/9/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/10/website" target="_blank"><img src="https://opencollective.com/debug/backer/10/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/11/website" target="_blank"><img src="https://opencollective.com/debug/backer/11/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/12/website" target="_blank"><img src="https://opencollective.com/debug/backer/12/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/13/website" target="_blank"><img src="https://opencollective.com/debug/backer/13/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/14/website" target="_blank"><img src="https://opencollective.com/debug/backer/14/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/15/website" target="_blank"><img src="https://opencollective.com/debug/backer/15/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/16/website" target="_blank"><img src="https://opencollective.com/debug/backer/16/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/17/website" target="_blank"><img src="https://opencollective.com/debug/backer/17/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/18/website" target="_blank"><img src="https://opencollective.com/debug/backer/18/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/19/website" target="_blank"><img src="https://opencollective.com/debug/backer/19/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/20/website" target="_blank"><img src="https://opencollective.com/debug/backer/20/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/21/website" target="_blank"><img src="https://opencollective.com/debug/backer/21/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/22/website" target="_blank"><img src="https://opencollective.com/debug/backer/22/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/23/website" target="_blank"><img src="https://opencollective.com/debug/backer/23/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/24/website" target="_blank"><img src="https://opencollective.com/debug/backer/24/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/25/website" target="_blank"><img src="https://opencollective.com/debug/backer/25/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/26/website" target="_blank"><img src="https://opencollective.com/debug/backer/26/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/27/website" target="_blank"><img src="https://opencollective.com/debug/backer/27/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/28/website" target="_blank"><img src="https://opencollective.com/debug/backer/28/avatar.svg"></a>
<a href="https://opencollective.com/debug/backer/29/website" target="_blank"><img src="https://opencollective.com/debug/backer/29/avatar.svg"></a>

## Sponsors

Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)]

<a href="https://opencollective.com/debug/sponsor/0/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/1/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/2/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/3/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/4/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/5/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/6/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/7/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/8/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/9/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/9/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/10/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/10/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/11/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/11/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/12/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/12/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/13/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/13/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/14/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/14/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/15/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/15/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/16/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/16/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/17/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/17/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/18/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/18/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/19/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/19/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/20/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/20/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/21/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/21/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/22/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/22/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/23/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/23/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/24/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/24/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/25/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/25/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/26/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/26/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/27/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/27/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/28/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/28/avatar.svg"></a>
<a href="https://opencollective.com/debug/sponsor/29/website" target="_blank"><img src="https://opencollective.com/debug/sponsor/29/avatar.svg"></a>

## License

(The MIT License)

Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

1 node_modules/needle/node_modules/debug/node.js generated vendored
@@ -1 +0,0 @@
module.exports = require('./src/node');

51 node_modules/needle/node_modules/debug/package.json generated vendored
@@ -1,51 +0,0 @@
{
|
||||
"name": "debug",
|
||||
"version": "3.2.7",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/visionmedia/debug.git"
|
||||
},
|
||||
"description": "small debugging utility",
|
||||
"keywords": [
|
||||
"debug",
|
||||
"log",
|
||||
"debugger"
|
||||
],
|
||||
"files": [
|
||||
"src",
|
||||
"node.js",
|
||||
"dist/debug.js",
|
||||
"LICENSE",
|
||||
"README.md"
|
||||
],
|
||||
"author": "TJ Holowaychuk <tj@vision-media.ca>",
|
||||
"contributors": [
|
||||
"Nathan Rajlich <nathan@tootallnate.net> (http://n8.io)",
|
||||
"Andrew Rhyne <rhyneandrew@gmail.com>"
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"browserify": "14.4.0",
|
||||
"chai": "^3.5.0",
|
||||
"concurrently": "^3.1.0",
|
||||
"coveralls": "^3.0.2",
|
||||
"istanbul": "^0.4.5",
|
||||
"karma": "^3.0.0",
|
||||
"karma-chai": "^0.1.0",
|
||||
"karma-mocha": "^1.3.0",
|
||||
"karma-phantomjs-launcher": "^1.0.2",
|
||||
"mocha": "^5.2.0",
|
||||
"mocha-lcov-reporter": "^1.2.0",
|
||||
"rimraf": "^2.5.4",
|
||||
"xo": "^0.23.0"
|
||||
},
|
||||
"main": "./src/index.js",
|
||||
"browser": "./src/browser.js",
|
||||
"unpkg": "./dist/debug.js"
|
||||
}

180 node_modules/needle/node_modules/debug/src/browser.js generated vendored
@@ -1,180 +0,0 @@
"use strict";
|
||||
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
/* eslint-env browser */
|
||||
|
||||
/**
|
||||
* This is the web browser implementation of `debug()`.
|
||||
*/
|
||||
exports.log = log;
|
||||
exports.formatArgs = formatArgs;
|
||||
exports.save = save;
|
||||
exports.load = load;
|
||||
exports.useColors = useColors;
|
||||
exports.storage = localstorage();
|
||||
/**
|
||||
* Colors.
|
||||
*/
|
||||
|
||||
exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33'];
|
||||
/**
|
||||
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
|
||||
* and the Firebug extension (any Firefox version) are known
|
||||
* to support "%c" CSS customizations.
|
||||
*
|
||||
* TODO: add a `localStorage` variable to explicitly enable/disable colors
|
||||
*/
|
||||
// eslint-disable-next-line complexity
|
||||
|
||||
function useColors() {
|
||||
// NB: In an Electron preload script, document will be defined but not fully
|
||||
// initialized. Since we know we're in Chrome, we'll just detect this case
|
||||
// explicitly
|
||||
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
|
||||
return true;
|
||||
} // Internet Explorer and Edge do not support colors.
|
||||
|
||||
|
||||
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
|
||||
return false;
|
||||
} // Is webkit? http://stackoverflow.com/a/16459606/376773
|
||||
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
|
||||
|
||||
|
||||
return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
|
||||
typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
|
||||
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
|
||||
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
|
||||
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
|
||||
}
|
||||
/**
|
||||
* Colorize log arguments if enabled.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function formatArgs(args) {
|
||||
args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff);
|
||||
|
||||
if (!this.useColors) {
|
||||
return;
|
||||
}
|
||||
|
||||
var c = 'color: ' + this.color;
|
||||
args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other
|
||||
// arguments passed either before or after the %c, so we need to
|
||||
// figure out the correct index to insert the CSS into
|
||||
|
||||
var index = 0;
|
||||
var lastC = 0;
|
||||
args[0].replace(/%[a-zA-Z%]/g, function (match) {
|
||||
if (match === '%%') {
|
||||
return;
|
||||
}
|
||||
|
||||
index++;
|
||||
|
||||
if (match === '%c') {
|
||||
// We only are interested in the *last* %c
|
||||
// (the user may have provided their own)
|
||||
lastC = index;
|
||||
}
|
||||
});
|
||||
args.splice(lastC, 0, c);
|
||||
}
|
||||
/**
|
||||
* Invokes `console.log()` when available.
|
||||
* No-op when `console.log` is not a "function".
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function log() {
|
||||
var _console;
|
||||
|
||||
// This hackery is required for IE8/9, where
|
||||
// the `console.log` function doesn't have 'apply'
|
||||
return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments);
|
||||
}
|
||||
/**
|
||||
* Save `namespaces`.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function save(namespaces) {
|
||||
try {
|
||||
if (namespaces) {
|
||||
exports.storage.setItem('debug', namespaces);
|
||||
} else {
|
||||
exports.storage.removeItem('debug');
|
||||
}
|
||||
} catch (error) {// Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Load `namespaces`.
|
||||
*
|
||||
* @return {String} returns the previously persisted debug modes
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function load() {
|
||||
var r;
|
||||
|
||||
try {
|
||||
r = exports.storage.getItem('debug');
|
||||
} catch (error) {} // Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
|
||||
|
||||
|
||||
if (!r && typeof process !== 'undefined' && 'env' in process) {
|
||||
r = process.env.DEBUG;
|
||||
}
|
||||
|
||||
return r;
|
||||
}
|
||||
/**
|
||||
* Localstorage attempts to return the localstorage.
|
||||
*
|
||||
* This is necessary because safari throws
|
||||
* when a user disables cookies/localstorage
|
||||
* and you attempt to access it.
|
||||
*
|
||||
* @return {LocalStorage}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function localstorage() {
|
||||
try {
|
||||
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
|
||||
// The Browser also has localStorage in the global context.
|
||||
return localStorage;
|
||||
} catch (error) {// Swallow
|
||||
// XXX (@Qix-) should we be logging these?
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = require('./common')(exports);
|
||||
var formatters = module.exports.formatters;
|
||||
/**
|
||||
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
|
||||
*/
|
||||
|
||||
formatters.j = function (v) {
|
||||
try {
|
||||
return JSON.stringify(v);
|
||||
} catch (error) {
|
||||
return '[UnexpectedJSONParseError]: ' + error.message;
|
||||
}
|
||||
};
|
||||

249 node_modules/needle/node_modules/debug/src/common.js generated vendored
@@ -1,249 +0,0 @@
"use strict";
|
||||
|
||||
/**
|
||||
* This is the common logic for both the Node.js and web browser
|
||||
* implementations of `debug()`.
|
||||
*/
|
||||
function setup(env) {
|
||||
createDebug.debug = createDebug;
|
||||
createDebug.default = createDebug;
|
||||
createDebug.coerce = coerce;
|
||||
createDebug.disable = disable;
|
||||
createDebug.enable = enable;
|
||||
createDebug.enabled = enabled;
|
||||
createDebug.humanize = require('ms');
|
||||
Object.keys(env).forEach(function (key) {
|
||||
createDebug[key] = env[key];
|
||||
});
|
||||
/**
|
||||
* Active `debug` instances.
|
||||
*/
|
||||
|
||||
createDebug.instances = [];
|
||||
/**
|
||||
* The currently active debug mode names, and names to skip.
|
||||
*/
|
||||
|
||||
createDebug.names = [];
|
||||
createDebug.skips = [];
|
||||
/**
|
||||
* Map of special "%n" handling functions, for the debug "format" argument.
|
||||
*
|
||||
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
|
||||
*/
|
||||
|
||||
createDebug.formatters = {};
|
||||
/**
|
||||
* Selects a color for a debug namespace
|
||||
* @param {String} namespace The namespace string for the for the debug instance to be colored
|
||||
* @return {Number|String} An ANSI color code for the given namespace
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function selectColor(namespace) {
|
||||
var hash = 0;
|
||||
|
||||
for (var i = 0; i < namespace.length; i++) {
|
||||
hash = (hash << 5) - hash + namespace.charCodeAt(i);
|
||||
hash |= 0; // Convert to 32bit integer
|
||||
}
|
||||
|
||||
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
|
||||
}
|
||||
|
||||
createDebug.selectColor = selectColor;
|
||||
/**
|
||||
* Create a debugger with the given `namespace`.
|
||||
*
|
||||
* @param {String} namespace
|
||||
* @return {Function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function createDebug(namespace) {
|
||||
var prevTime;
|
||||
|
||||
function debug() {
|
||||
// Disabled?
|
||||
if (!debug.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||
args[_key] = arguments[_key];
|
||||
}
|
||||
|
||||
var self = debug; // Set `diff` timestamp
|
||||
|
||||
var curr = Number(new Date());
|
||||
var ms = curr - (prevTime || curr);
|
||||
self.diff = ms;
|
||||
self.prev = prevTime;
|
||||
self.curr = curr;
|
||||
prevTime = curr;
|
||||
args[0] = createDebug.coerce(args[0]);
|
||||
|
||||
if (typeof args[0] !== 'string') {
|
||||
// Anything else let's inspect with %O
|
||||
args.unshift('%O');
|
||||
} // Apply any `formatters` transformations
|
||||
|
||||
|
||||
var index = 0;
|
||||
args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) {
|
||||
// If we encounter an escaped % then don't increase the array index
|
||||
if (match === '%%') {
|
||||
return match;
|
||||
}
|
||||
|
||||
index++;
|
||||
var formatter = createDebug.formatters[format];
|
||||
|
||||
if (typeof formatter === 'function') {
|
||||
var val = args[index];
|
||||
match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format`
|
||||
|
||||
args.splice(index, 1);
|
||||
index--;
|
||||
}
|
||||
|
||||
return match;
|
||||
}); // Apply env-specific formatting (colors, etc.)
|
||||
|
||||
createDebug.formatArgs.call(self, args);
|
||||
var logFn = self.log || createDebug.log;
|
||||
logFn.apply(self, args);
|
||||
}
|
||||
|
||||
debug.namespace = namespace;
|
||||
debug.enabled = createDebug.enabled(namespace);
|
||||
debug.useColors = createDebug.useColors();
|
||||
debug.color = selectColor(namespace);
|
||||
debug.destroy = destroy;
|
||||
debug.extend = extend; // Debug.formatArgs = formatArgs;
|
||||
// debug.rawLog = rawLog;
|
||||
// env-specific initialization logic for debug instances
|
||||
|
||||
if (typeof createDebug.init === 'function') {
|
||||
createDebug.init(debug);
|
||||
}
|
||||
|
||||
createDebug.instances.push(debug);
|
||||
return debug;
|
||||
}
|
||||
|
||||
function destroy() {
|
||||
var index = createDebug.instances.indexOf(this);
|
||||
|
||||
if (index !== -1) {
|
||||
createDebug.instances.splice(index, 1);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function extend(namespace, delimiter) {
|
||||
return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
|
||||
}
|
||||
/**
|
||||
* Enables a debug mode by namespaces. This can include modes
|
||||
* separated by a colon and wildcards.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function enable(namespaces) {
|
||||
createDebug.save(namespaces);
|
||||
createDebug.names = [];
|
||||
createDebug.skips = [];
|
||||
var i;
|
||||
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
|
||||
var len = split.length;
|
||||
|
||||
for (i = 0; i < len; i++) {
|
||||
if (!split[i]) {
|
||||
// ignore empty strings
|
||||
continue;
|
||||
}
|
||||
|
||||
namespaces = split[i].replace(/\*/g, '.*?');
|
||||
|
||||
if (namespaces[0] === '-') {
|
||||
createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
|
||||
} else {
|
||||
createDebug.names.push(new RegExp('^' + namespaces + '$'));
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0; i < createDebug.instances.length; i++) {
|
||||
var instance = createDebug.instances[i];
|
||||
instance.enabled = createDebug.enabled(instance.namespace);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Disable debug output.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function disable() {
|
||||
createDebug.enable('');
|
||||
}
|
||||
/**
|
||||
* Returns true if the given mode name is enabled, false otherwise.
|
||||
*
|
||||
* @param {String} name
|
||||
* @return {Boolean}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function enabled(name) {
|
||||
if (name[name.length - 1] === '*') {
|
||||
return true;
|
||||
}
|
||||
|
||||
var i;
|
||||
var len;
|
||||
|
||||
for (i = 0, len = createDebug.skips.length; i < len; i++) {
|
||||
if (createDebug.skips[i].test(name)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0, len = createDebug.names.length; i < len; i++) {
|
||||
if (createDebug.names[i].test(name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Coerce `val`.
|
||||
*
|
||||
* @param {Mixed} val
|
||||
* @return {Mixed}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function coerce(val) {
|
||||
if (val instanceof Error) {
|
||||
return val.stack || val.message;
|
||||
}
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
createDebug.enable(createDebug.load());
|
||||
return createDebug;
|
||||
}
|
||||
|
||||
module.exports = setup;
|
||||

12 node_modules/needle/node_modules/debug/src/index.js generated vendored
@@ -1,12 +0,0 @@
"use strict";
|
||||
|
||||
/**
|
||||
* Detect Electron renderer / nwjs process, which is node, but we should
|
||||
* treat as a browser.
|
||||
*/
|
||||
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
|
||||
module.exports = require('./browser.js');
|
||||
} else {
|
||||
module.exports = require('./node.js');
|
||||
}
|
||||

177 node_modules/needle/node_modules/debug/src/node.js generated vendored
@@ -1,177 +0,0 @@
"use strict";
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
var tty = require('tty');
|
||||
|
||||
var util = require('util');
|
||||
/**
|
||||
* This is the Node.js implementation of `debug()`.
|
||||
*/
|
||||
|
||||
|
||||
exports.init = init;
|
||||
exports.log = log;
|
||||
exports.formatArgs = formatArgs;
|
||||
exports.save = save;
|
||||
exports.load = load;
|
||||
exports.useColors = useColors;
|
||||
/**
|
||||
* Colors.
|
||||
*/
|
||||
|
||||
exports.colors = [6, 2, 3, 4, 5, 1];
|
||||
|
||||
try {
|
||||
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
var supportsColor = require('supports-color');
|
||||
|
||||
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
|
||||
exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221];
|
||||
}
|
||||
} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be.
|
||||
|
||||
/**
|
||||
* Build up the default `inspectOpts` object from the environment variables.
|
||||
*
|
||||
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
|
||||
*/
|
||||
|
||||
|
||||
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
|
||||
return /^debug_/i.test(key);
|
||||
}).reduce(function (obj, key) {
|
||||
// Camel-case
|
||||
var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) {
|
||||
return k.toUpperCase();
|
||||
}); // Coerce string value into JS value
|
||||
|
||||
var val = process.env[key];
|
||||
|
||||
if (/^(yes|on|true|enabled)$/i.test(val)) {
|
||||
val = true;
|
||||
} else if (/^(no|off|false|disabled)$/i.test(val)) {
|
||||
val = false;
|
||||
} else if (val === 'null') {
|
||||
val = null;
|
||||
} else {
|
||||
val = Number(val);
|
||||
}
|
||||
|
||||
obj[prop] = val;
|
||||
return obj;
|
||||
}, {});
|
||||
/**
|
||||
* Is stdout a TTY? Colored output is enabled when `true`.
|
||||
*/
|
||||
|
||||
function useColors() {
|
||||
return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
|
||||
}
|
||||
/**
|
||||
* Adds ANSI color escape codes if enabled.
|
||||
*
|
||||
* @api public
|
||||
*/
|
||||
|
||||
|
||||
function formatArgs(args) {
|
||||
var name = this.namespace,
|
||||
useColors = this.useColors;
|
||||
|
||||
if (useColors) {
|
||||
var c = this.color;
|
||||
var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c);
|
||||
var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m");
|
||||
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
|
||||
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m");
|
||||
} else {
|
||||
args[0] = getDate() + name + ' ' + args[0];
|
||||
}
|
||||
}
|
||||
|
||||
function getDate() {
|
||||
if (exports.inspectOpts.hideDate) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return new Date().toISOString() + ' ';
|
||||
}
|
||||
/**
|
||||
* Invokes `util.format()` with the specified arguments and writes to stderr.
|
||||
*/
|
||||
|
||||
|
||||
function log() {
|
||||
return process.stderr.write(util.format.apply(util, arguments) + '\n');
|
||||
}
|
||||
/**
|
||||
* Save `namespaces`.
|
||||
*
|
||||
* @param {String} namespaces
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function save(namespaces) {
|
||||
if (namespaces) {
|
||||
process.env.DEBUG = namespaces;
|
||||
} else {
|
||||
// If you set a process.env field to null or undefined, it gets cast to the
|
||||
// string 'null' or 'undefined'. Just delete instead.
|
||||
delete process.env.DEBUG;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Load `namespaces`.
|
||||
*
|
||||
* @return {String} returns the previously persisted debug modes
|
||||
* @api private
|
||||
*/
|
||||
|
||||
|
||||
function load() {
|
||||
return process.env.DEBUG;
|
||||
}
|
||||
/**
|
||||
* Init logic for `debug` instances.
|
||||
*
|
||||
* Create a new `inspectOpts` object in case `useColors` is set
|
||||
* differently for a particular `debug` instance.
|
||||
*/
|
||||
|
||||
|
||||
function init(debug) {
|
||||
debug.inspectOpts = {};
|
||||
var keys = Object.keys(exports.inspectOpts);
|
||||
|
||||
for (var i = 0; i < keys.length; i++) {
|
||||
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = require('./common')(exports);
|
||||
var formatters = module.exports.formatters;
|
||||
/**
|
||||
* Map %o to `util.inspect()`, all on a single line.
|
||||
*/
|
||||
|
||||
formatters.o = function (v) {
|
||||
this.inspectOpts.colors = this.useColors;
|
||||
return util.inspect(v, this.inspectOpts)
|
||||
.split('\n')
|
||||
.map(function (str) { return str.trim(); })
|
||||
.join(' ');
|
||||
};
|
||||
/**
|
||||
* Map %O to `util.inspect()`, allowing multiple lines if needed.
|
||||
*/
|
||||
|
||||
|
||||
formatters.O = function (v) {
|
||||
this.inspectOpts.colors = this.useColors;
|
||||
return util.inspect(v, this.inspectOpts);
|
||||
};
|
||||

162 node_modules/needle/node_modules/ms/index.js generated vendored
@@ -1,162 +0,0 @@
/**
|
||||
* Helpers.
|
||||
*/
|
||||
|
||||
var s = 1000;
|
||||
var m = s * 60;
|
||||
var h = m * 60;
|
||||
var d = h * 24;
|
||||
var w = d * 7;
|
||||
var y = d * 365.25;
|
||||
|
||||
/**
|
||||
* Parse or format the given `val`.
|
||||
*
|
||||
* Options:
|
||||
*
|
||||
* - `long` verbose formatting [false]
|
||||
*
|
||||
* @param {String|Number} val
|
||||
* @param {Object} [options]
|
||||
* @throws {Error} throw an error if val is not a non-empty string or a number
|
||||
* @return {String|Number}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
module.exports = function (val, options) {
|
||||
options = options || {};
|
||||
var type = typeof val;
|
||||
if (type === 'string' && val.length > 0) {
|
||||
return parse(val);
|
||||
} else if (type === 'number' && isFinite(val)) {
|
||||
return options.long ? fmtLong(val) : fmtShort(val);
|
||||
}
|
||||
throw new Error(
|
||||
'val is not a non-empty string or a valid number. val=' +
|
||||
JSON.stringify(val)
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the given `str` and return milliseconds.
|
||||
*
|
||||
* @param {String} str
|
||||
* @return {Number}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parse(str) {
|
||||
str = String(str);
|
||||
if (str.length > 100) {
|
||||
return;
|
||||
}
|
||||
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
|
||||
str
|
||||
);
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
var n = parseFloat(match[1]);
|
||||
var type = (match[2] || 'ms').toLowerCase();
|
||||
switch (type) {
|
||||
case 'years':
|
||||
case 'year':
|
||||
case 'yrs':
|
||||
case 'yr':
|
||||
case 'y':
|
||||
return n * y;
|
||||
case 'weeks':
|
||||
case 'week':
|
||||
case 'w':
|
||||
return n * w;
|
||||
case 'days':
|
||||
case 'day':
|
||||
case 'd':
|
||||
return n * d;
|
||||
case 'hours':
|
||||
case 'hour':
|
||||
case 'hrs':
|
||||
case 'hr':
|
||||
case 'h':
|
||||
return n * h;
|
||||
case 'minutes':
|
||||
case 'minute':
|
||||
case 'mins':
|
||||
case 'min':
|
||||
case 'm':
|
||||
return n * m;
|
||||
case 'seconds':
|
||||
case 'second':
|
||||
case 'secs':
|
||||
case 'sec':
|
||||
case 's':
|
||||
return n * s;
|
||||
case 'milliseconds':
|
||||
case 'millisecond':
|
||||
case 'msecs':
|
||||
case 'msec':
|
||||
case 'ms':
|
||||
return n;
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Short format for `ms`.
|
||||
*
|
||||
* @param {Number} ms
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function fmtShort(ms) {
|
||||
var msAbs = Math.abs(ms);
|
||||
if (msAbs >= d) {
|
||||
return Math.round(ms / d) + 'd';
|
||||
}
|
||||
if (msAbs >= h) {
|
||||
return Math.round(ms / h) + 'h';
|
||||
}
|
||||
if (msAbs >= m) {
|
||||
return Math.round(ms / m) + 'm';
|
||||
}
|
||||
if (msAbs >= s) {
|
||||
return Math.round(ms / s) + 's';
|
||||
}
|
||||
return ms + 'ms';
|
||||
}
|
||||
|
||||
/**
|
||||
* Long format for `ms`.
|
||||
*
|
||||
* @param {Number} ms
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function fmtLong(ms) {
|
||||
var msAbs = Math.abs(ms);
|
||||
if (msAbs >= d) {
|
||||
return plural(ms, msAbs, d, 'day');
|
||||
}
|
||||
if (msAbs >= h) {
|
||||
return plural(ms, msAbs, h, 'hour');
|
||||
}
|
||||
if (msAbs >= m) {
|
||||
return plural(ms, msAbs, m, 'minute');
|
||||
}
|
||||
if (msAbs >= s) {
|
||||
return plural(ms, msAbs, s, 'second');
|
||||
}
|
||||
return ms + ' ms';
|
||||
}
|
||||
|
||||
/**
|
||||
* Pluralization helper.
|
||||
*/
|
||||
|
||||
function plural(ms, msAbs, n, name) {
|
||||
var isPlural = msAbs >= n * 1.5;
|
||||
return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
|
||||
}

21 node_modules/needle/node_modules/ms/license.md generated vendored
@@ -1,21 +0,0 @@
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2020 Vercel, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.

38 node_modules/needle/node_modules/ms/package.json generated vendored
@@ -1,38 +0,0 @@
{
|
||||
"name": "ms",
|
||||
"version": "2.1.3",
|
||||
"description": "Tiny millisecond conversion utility",
|
||||
"repository": "vercel/ms",
|
||||
"main": "./index",
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"scripts": {
|
||||
"precommit": "lint-staged",
|
||||
"lint": "eslint lib/* bin/*",
|
||||
"test": "mocha tests.js"
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": "eslint:recommended",
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.js": [
|
||||
"npm run lint",
|
||||
"prettier --single-quote --write",
|
||||
"git add"
|
||||
]
|
||||
},
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"eslint": "4.18.2",
|
||||
"expect.js": "0.3.1",
|
||||
"husky": "0.14.3",
|
||||
"lint-staged": "5.0.0",
|
||||
"mocha": "4.0.1",
|
||||
"prettier": "2.0.5"
|
||||
}
|
||||
}

59 node_modules/needle/node_modules/ms/readme.md generated vendored
@@ -1,59 +0,0 @@
# ms
|
||||
|
||||
![CI](https://github.com/vercel/ms/workflows/CI/badge.svg)
|
||||
|
||||
Use this package to easily convert various time formats to milliseconds.
|
||||
|
||||
## Examples
|
||||
|
||||
```js
|
||||
ms('2 days') // 172800000
|
||||
ms('1d') // 86400000
|
||||
ms('10h') // 36000000
|
||||
ms('2.5 hrs') // 9000000
|
||||
ms('2h') // 7200000
|
||||
ms('1m') // 60000
|
||||
ms('5s') // 5000
|
||||
ms('1y') // 31557600000
|
||||
ms('100') // 100
|
||||
ms('-3 days') // -259200000
|
||||
ms('-1h') // -3600000
|
||||
ms('-200') // -200
|
||||
```
|
||||
|
||||
### Convert from Milliseconds
|
||||
|
||||
```js
|
||||
ms(60000) // "1m"
|
||||
ms(2 * 60000) // "2m"
|
||||
ms(-3 * 60000) // "-3m"
|
||||
ms(ms('10 hours')) // "10h"
|
||||
```
|
||||
|
||||
### Time Format Written-Out
|
||||
|
||||
```js
|
||||
ms(60000, { long: true }) // "1 minute"
|
||||
ms(2 * 60000, { long: true }) // "2 minutes"
|
||||
ms(-3 * 60000, { long: true }) // "-3 minutes"
|
||||
ms(ms('10 hours'), { long: true }) // "10 hours"
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- Works both in [Node.js](https://nodejs.org) and in the browser
|
||||
- If a number is supplied to `ms`, a string with a unit is returned
|
||||
- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`)
|
||||
- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned
|
||||
|
||||
## Related Packages
|
||||
|
||||
- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time.
|
||||
|
||||
## Caught a Bug?
|
||||
|
||||
1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device
|
||||
2. Link the package to the global module directory: `npm link`
|
||||
3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms!
|
||||
|
||||
As always, you can run the tests using: `npm test`

71 node_modules/needle/package.json generated vendored
@@ -1,71 +0,0 @@
{
|
||||
"name": "needle",
|
||||
"version": "2.9.1",
|
||||
"description": "The leanest and most handsome HTTP client in the Nodelands.",
|
||||
"keywords": [
|
||||
"http",
|
||||
"https",
|
||||
"simple",
|
||||
"request",
|
||||
"client",
|
||||
"multipart",
|
||||
"upload",
|
||||
"proxy",
|
||||
"deflate",
|
||||
"timeout",
|
||||
"charset",
|
||||
"iconv",
|
||||
"cookie",
|
||||
"redirect"
|
||||
],
|
||||
"tags": [
|
||||
"http",
|
||||
"https",
|
||||
"simple",
|
||||
"request",
|
||||
"client",
|
||||
"multipart",
|
||||
"upload",
|
||||
"proxy",
|
||||
"deflate",
|
||||
"timeout",
|
||||
"charset",
|
||||
"iconv",
|
||||
"cookie",
|
||||
"redirect"
|
||||
],
|
||||
"author": "Tomás Pollak <tomas@forkhq.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tomas/needle.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": "^3.2.6",
|
||||
"iconv-lite": "^0.4.4",
|
||||
"sax": "^1.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"JSONStream": "^1.3.5",
|
||||
"jschardet": "^1.6.0",
|
||||
"mocha": "^5.2.0",
|
||||
"pump": "^3.0.0",
|
||||
"q": "^1.5.1",
|
||||
"should": "^13.2.3",
|
||||
"sinon": "^2.3.0",
|
||||
"xml2js": "^0.4.19"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha test"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "./lib"
|
||||
},
|
||||
"main": "./lib/needle",
|
||||
"bin": {
|
||||
"needle": "./bin/needle"
|
||||
},
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 4.4.x"
|
||||
}
|
||||
}

192 node_modules/needle/test/auth_digest_spec.js generated vendored
@@ -1,192 +0,0 @@
var needle = require('../'),
|
||||
auth = require('../lib/auth'),
|
||||
sinon = require('sinon'),
|
||||
should = require('should'),
|
||||
http = require('http'),
|
||||
helpers = require('./helpers');
|
||||
|
||||
var createHash = require('crypto').createHash;
|
||||
|
||||
function md5(string) {
|
||||
return createHash('md5').update(string).digest('hex');
|
||||
}
|
||||
|
||||
function parse_header(header) {
|
||||
var challenge = {},
|
||||
matches = header.match(/([a-z0-9_-]+)="?([a-z0-9=\/\.@\s-\+]+)"?/gi);
|
||||
|
||||
for (var i = 0, l = matches.length; i < l; i++) {
|
||||
var parts = matches[i].split('='),
|
||||
key = parts.shift(),
|
||||
val = parts.join('=').replace(/^"/, '').replace(/"$/, '');
|
||||
|
||||
challenge[key] = val;
|
||||
}
|
||||
|
||||
return challenge;
|
||||
}
|
||||
|
||||
describe('auth_digest', function() {
|
||||
describe('With qop (RFC 2617)', function() {
|
||||
it('should generate a proper header', function() {
|
||||
// from https://tools.ietf.org/html/rfc2617
|
||||
var performDigest = function() {
|
||||
var header = 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"';
|
||||
var user = 'Mufasa';
|
||||
var pass = 'Circle Of Life';
|
||||
var method = 'get';
|
||||
var path = '/dir/index.html';
|
||||
|
||||
var updatedHeader = auth.digest(header, user, pass, method, path);
|
||||
var parsedUpdatedHeader = parse_header(updatedHeader);
|
||||
|
||||
var ha1 = md5(user + ':' + parsedUpdatedHeader.realm + ':' + pass);
|
||||
var ha2 = md5(method.toUpperCase() + ':' + path);
|
||||
var expectedResponse = md5([
|
||||
ha1,
|
||||
parsedUpdatedHeader.nonce,
|
||||
parsedUpdatedHeader.nc,
|
||||
parsedUpdatedHeader.cnonce,
|
||||
parsedUpdatedHeader.qop,
|
||||
ha2
|
||||
].join(':'));
|
||||
|
||||
return {
|
||||
header: updatedHeader,
|
||||
parsed: parsedUpdatedHeader,
|
||||
expectedResponse: expectedResponse,
|
||||
}
|
||||
}
|
||||
|
||||
const result = performDigest();
|
||||
|
||||
(result.header).should
|
||||
.match(/qop="auth"/)
|
||||
.match(/uri="\/dir\/index.html"/)
|
||||
.match(/opaque="5ccc069c403ebaf9f0171e9517f40e41"/)
|
||||
.match(/realm="testrealm@host\.com"/)
|
||||
.match(/response=/)
|
||||
.match(/nc=/)
|
||||
.match(/nonce=/)
|
||||
.match(/cnonce=/);
|
||||
|
||||
(result.parsed.response).should.be.eql(result.expectedResponse);
|
||||
});
|
||||
});
|
||||
|
||||
describe('With plus character in nonce header', function() {
|
||||
it('should generate a proper header', function() {
|
||||
// from https://tools.ietf.org/html/rfc2617
|
||||
var performDigest = function() {
|
||||
var header = 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f6+00bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"';
|
||||
var user = 'Mufasa';
|
||||
var pass = 'Circle Of Life';
|
||||
var method = 'get';
|
||||
var path = '/dir/index.html';
|
||||
|
||||
var updatedHeader = auth.digest(header, user, pass, method, path);
|
||||
var parsedUpdatedHeader = parse_header(updatedHeader);
|
||||
|
||||
var ha1 = md5(user + ':' + parsedUpdatedHeader.realm + ':' + pass);
|
||||
var ha2 = md5(method.toUpperCase() + ':' + path);
|
||||
var expectedResponse = md5([
|
||||
ha1,
|
||||
parsedUpdatedHeader.nonce,
|
||||
parsedUpdatedHeader.nc,
|
||||
parsedUpdatedHeader.cnonce,
|
||||
parsedUpdatedHeader.qop,
|
||||
ha2
|
||||
].join(':'));
|
||||
|
||||
return {
|
||||
header: updatedHeader,
|
||||
parsed: parsedUpdatedHeader,
|
||||
expectedResponse: expectedResponse,
|
||||
}
|
||||
}
|
||||
|
||||
const result = performDigest();
|
||||
|
||||
(result.header).should
|
||||
.match(/nonce="dcd98b7102dd2f0e8b11d0f6\+00bfb0c093"/)
|
||||
});
|
||||
});
|
||||
|
||||
describe('With brackets in realm header', function() {
|
||||
it('should generate a proper header', function() {
|
||||
// from https://tools.ietf.org/html/rfc2617
|
||||
var performDigest = function() {
|
||||
var header = 'Digest qop="auth", realm="IP Camera(76475)", nonce="4e4449794d575269597a706b5a575935595441324d673d3d", stale="FALSE", Basic realm="IP Camera(76475)"';
|
||||
var user = 'Mufasa';
|
||||
var pass = 'Circle Of Life';
|
||||
var method = 'get';
|
||||
var path = '/dir/index.html';
|
||||
|
||||
var updatedHeader = auth.digest(header, user, pass, method, path);
|
||||
var parsedUpdatedHeader = parse_header(updatedHeader);
|
||||
|
||||
var ha1 = md5(user + ':' + parsedUpdatedHeader.realm + ':' + pass);
|
||||
var ha2 = md5(method.toUpperCase() + ':' + path);
|
||||
var expectedResponse = md5([
|
||||
ha1,
|
||||
parsedUpdatedHeader.nonce,
|
||||
parsedUpdatedHeader.nc,
|
||||
parsedUpdatedHeader.cnonce,
|
||||
parsedUpdatedHeader.qop,
|
||||
ha2
|
||||
].join(':'));
|
||||
|
||||
return {
|
||||
header: updatedHeader,
|
||||
parsed: parsedUpdatedHeader,
|
||||
expectedResponse: expectedResponse,
|
||||
}
|
||||
}
|
||||
|
||||
const result = performDigest();
|
||||
|
||||
(result.header).should
|
||||
.match(/realm="IP Camera\(76475\)"/)
|
||||
});
|
||||
});
|
||||
|
||||
describe('Without qop (RFC 2617)', function() {
|
||||
it('should generate a proper header', function() {
|
||||
// from https://tools.ietf.org/html/rfc2069
|
||||
var performDigest = function() {
|
||||
var header = 'Digest realm="testrealm@host.com", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"';
|
||||
var user = 'Mufasa';
|
||||
var pass = 'Circle Of Life';
|
||||
var method = 'get';
|
||||
var path = '/dir/index.html';
|
||||
|
||||
var updatedHeader = auth.digest(header, user, pass, method, path);
|
||||
var parsedUpdatedHeader = parse_header(updatedHeader);
|
||||
|
||||
var ha1 = md5(user + ':' + parsedUpdatedHeader.realm + ':' + pass);
|
||||
var ha2 = md5(method.toUpperCase() + ':' + path);
|
||||
var expectedResponse = md5([ha1, parsedUpdatedHeader.nonce, ha2].join(':'));
|
||||
|
||||
return {
|
||||
header: updatedHeader,
|
||||
parsed: parsedUpdatedHeader,
|
||||
expectedResponse: expectedResponse,
|
||||
}
|
||||
}
|
||||
|
||||
const result = performDigest();
|
||||
|
||||
(result.header).should
|
||||
.not.match(/qop=/)
|
||||
.match(/uri="\/dir\/index.html"/)
|
||||
.match(/opaque="5ccc069c403ebaf9f0171e9517f40e41"/)
|
||||
.match(/realm="testrealm@host\.com"/)
|
||||
.match(/response=/)
|
||||
.not.match(/nc=/)
|
||||
.match(/nonce=/)
|
||||
.not.match(/cnonce=/);
|
||||
|
||||
(result.parsed.response).should.be.eql(result.expectedResponse);
|
||||
});
|
||||
});
|
||||
})

196 node_modules/needle/test/basic_auth_spec.js generated vendored
@@ -1,196 +0,0 @@
var helpers = require('./helpers'),
|
||||
should = require('should'),
|
||||
needle = require('./../'),
|
||||
server;
|
||||
|
||||
var port = 7707;
|
||||
|
||||
describe('Basic Auth', function() {
|
||||
|
||||
before(function(done) {
|
||||
server = helpers.server({ port: port }, done);
|
||||
})
|
||||
|
||||
after(function(done) {
|
||||
server.close(done);
|
||||
})
|
||||
|
||||
///////////////// helpers
|
||||
|
||||
var get_auth = function(header) {
|
||||
var token = header.split(/\s+/).pop();
|
||||
return token && Buffer.from(token, 'base64').toString().split(':');
|
||||
}
|
||||
|
||||
describe('when neither username or password are passed', function() {
|
||||
|
||||
it('doesnt send any Authorization headers', function(done) {
|
||||
needle.get('localhost:' + port, { parse: true }, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.not.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('when username is an empty string, and password is a valid string', function() {
|
||||
|
||||
var opts = { username: '', password: 'foobar', parse: true };
|
||||
|
||||
it('doesnt send any Authorization headers', function(done) {
|
||||
needle.get('localhost:' + port, { parse: true }, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.not.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
});
|
||||
|
||||
describe('when username is a valid string, but no password is passed', function() {
|
||||
|
||||
var opts = { username: 'foobar', parse: true };
|
||||
|
||||
it('sends Authorization header', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('Basic Auth only includes username, without colon', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
var auth = get_auth(sent_headers['authorization']);
|
||||
auth[0].should.equal('foobar');
|
||||
auth.should.have.lengthOf(1);
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('when username is a valid string, and password is null', function() {
|
||||
|
||||
var opts = { username: 'foobar', password: null, parse: true };
|
||||
|
||||
it('sends Authorization header', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('Basic Auth only includes both username and password', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
var auth = get_auth(sent_headers['authorization']);
|
||||
auth[0].should.equal('foobar');
|
||||
auth[1].should.equal('');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('when username is a valid string, and password is an empty string', function() {
|
||||
|
||||
var opts = { username: 'foobar', password: '', parse: true };
|
||||
|
||||
it('sends Authorization header', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('Basic Auth only includes both username and password', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
var auth = get_auth(sent_headers['authorization']);
|
||||
auth[0].should.equal('foobar');
|
||||
auth[1].should.equal('');
|
||||
auth.should.have.lengthOf(2);
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('when username AND password are non empty strings', function() {
|
||||
|
||||
var opts = { username: 'foobar', password: 'jakub', parse: true };
|
||||
|
||||
it('sends Authorization header', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('Basic Auth only includes both user and password', function(done) {
|
||||
needle.get('localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
var auth = get_auth(sent_headers['authorization']);
|
||||
auth[0].should.equal('foobar');
|
||||
auth[1].should.equal('jakub');
|
||||
auth.should.have.lengthOf(2);
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('URL with @ but not username/pass', function() {
|
||||
it('doesnt send Authorization header', function(done) {
|
||||
var url = 'localhost:' + port + '/abc/@def/xyz.zip';
|
||||
|
||||
needle.get(url, {}, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.not.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('sends user:pass headers if passed via options', function(done) {
|
||||
var url = 'localhost:' + port + '/abc/@def/xyz.zip';
|
||||
|
||||
needle.get(url, { username: 'foo' }, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
sent_headers['authorization'].should.eql('Basic Zm9v')
|
||||
done();
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when username/password are included in URL', function() {
|
||||
var opts = { parse: true };
|
||||
|
||||
it('sends Authorization header', function(done) {
|
||||
needle.get('foobar:jakub@localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
Object.keys(sent_headers).should.containEql('authorization');
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
it('Basic Auth only includes both user and password', function(done) {
|
||||
needle.get('foobar:jakub@localhost:' + port, opts, function(err, resp) {
|
||||
var sent_headers = resp.body.headers;
|
||||
var auth = get_auth(sent_headers['authorization']);
|
||||
auth[0].should.equal('foobar');
|
||||
auth[1].should.equal('jakub');
|
||||
auth.should.have.lengthOf(2);
|
||||
done();
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
})

112 node_modules/needle/test/compression_spec.js generated vendored
@@ -1,112 +0,0 @@
var should = require('should'),
    needle = require('./../'),
    http = require('http'),
    zlib = require('zlib'),
    stream = require('stream'),
    port = 11123,
    server;

describe('compression', function(){

  require.bind(null, 'zlib').should.not.throw()

  var jsonData = '{"foo":"bar"}';

  describe('when server supports compression', function(){

    before(function(){
      server = http.createServer(function(req, res) {
        var raw = new stream.PassThrough();

        var acceptEncoding = req.headers['accept-encoding'];
        if (!acceptEncoding) {
          acceptEncoding = '';
        }

        if (acceptEncoding.match(/\bdeflate\b/)) {
          res.setHeader('Content-Encoding', 'deflate');
          raw.pipe(zlib.createDeflate()).pipe(res);
        } else if (acceptEncoding.match(/\bgzip\b/)) {
          res.setHeader('Content-Encoding', 'gzip');
          raw.pipe(zlib.createGzip()).pipe(res);
        } else if (acceptEncoding.match(/\bbr\b/)) {
          res.setHeader('Content-Encoding', 'br');
          raw.pipe(zlib.createBrotliCompress()).pipe(res);
        } else {
          raw.pipe(res);
        }

        res.setHeader('Content-Type', 'application/json')
        if (req.headers['with-bad']) {
          res.end('foo'); // end, no deflate data
        } else {
          raw.end(jsonData)
        }

      })

      server.listen(port);
    });

    after(function(done){
      server.close(done);
    })

    describe('and client requests no compression', function() {
      it('should have the body decompressed', function(done){
        needle.get('localhost:' + port, function(err, response, body){
          should.ifError(err);
          body.should.have.property('foo', 'bar');
          response.bytes.should.equal(jsonData.length);
          done();
        })
      })
    })

    describe('and client requests gzip compression', function() {
      it('should have the body decompressed', function(done){
        needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'gzip'}}, function(err, response, body){
          should.ifError(err);
          body.should.have.property('foo', 'bar');
          response.bytes.should.not.equal(jsonData.length);
          done();
        })
      })
    })

    describe('and client requests deflate compression', function() {
      it('should have the body decompressed', function(done){
        needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'deflate'}}, function(err, response, body){
          should.ifError(err);
          body.should.have.property('foo', 'bar');
          response.bytes.should.not.equal(jsonData.length);
          done();
        })
      })

      it('should rethrow errors from decompressors', function(done){
        needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'deflate', 'With-Bad': 'true'}}, function(err, response, body) {
          should.exist(err);
          err.message.should.equal("incorrect header check");
          err.code.should.equal("Z_DATA_ERROR")
          done();
        })
      })
    })

    describe('and client requests brotli compression', function() {
      it('should have the body decompressed', function(done){
        // Skip this test if Brotli is not supported
        if (typeof zlib.BrotliDecompress !== 'function') {
          return done();
        }
        needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'br'}}, function(err, response, body){
          should.ifError(err);
          body.should.have.property('foo', 'bar');
          response.bytes.should.not.equal(jsonData.length);
          done();
        })
      })
    })

  })

})
396
node_modules/needle/test/cookies_spec.js
generated
vendored
@@ -1,396 +0,0 @@
var needle = require('../'),
    cookies = require('../lib/cookies'),
    sinon = require('sinon'),
    http = require('http'),
    should = require('should');

var WEIRD_COOKIE_NAME = 'wc',
    BASE64_COOKIE_NAME = 'bc',
    FORBIDDEN_COOKIE_NAME = 'fc',
    NUMBER_COOKIE_NAME = 'nc';

var WEIRD_COOKIE_VALUE = '!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~',
    BASE64_COOKIE_VALUE = 'Y29va2llCg==',
    FORBIDDEN_COOKIE_VALUE = ' ;"\\,',
    NUMBER_COOKIE_VALUE = 12354342;

var NO_COOKIES_TEST_PORT = 11112,
    ALL_COOKIES_TEST_PORT = 11113;

describe('cookies', function() {

  var setCookieHeader, headers, server, opts;

  function decode(str) {
    return decodeURIComponent(str);
  }

  function encode(str) {
    str = str.toString().replace(/[\x00-\x1F\x7F]/g, encodeURIComponent);
    return str.replace(/[\s\"\,;\\%]/g, encodeURIComponent);
  }

  before(function() {
    setCookieHeader = [
      WEIRD_COOKIE_NAME + '=' + encode(WEIRD_COOKIE_VALUE) + ';',
      BASE64_COOKIE_NAME + '=' + encode(BASE64_COOKIE_VALUE) + ';',
      FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE) + ';',
      NUMBER_COOKIE_NAME + '=' + encode(NUMBER_COOKIE_VALUE) + ';'
    ];
  });

  before(function(done) {
    serverAllCookies = http.createServer(function(req, res) {
      res.setHeader('Content-Type', 'text/html');
      res.setHeader('Set-Cookie', setCookieHeader);
      res.end('200');
    }).listen(ALL_COOKIES_TEST_PORT, done);
  });

  after(function(done) {
    serverAllCookies.close(done);
  });

  describe('with default options', function() {
    it('no cookie header is set on request', function(done) {
      needle.get(
        'localhost:' + ALL_COOKIES_TEST_PORT, function(err, response) {
          should.not.exist(response.req._headers.cookie);
          done();
        });
    });
  });

  describe('if response does not contain cookies', function() {
    before(function(done) {
      serverNoCookies = http.createServer(function(req, res) {
        res.setHeader('Content-Type', 'text/html');
        res.end('200');
      }).listen(NO_COOKIES_TEST_PORT, done);
    });

    it('response.cookies is undefined', function(done) {
      needle.get(
        'localhost:' + NO_COOKIES_TEST_PORT, function(error, response) {
          should.not.exist(response.cookies);
          done();
        });
    });

    after(function(done) {
      serverNoCookies.close(done);
    });
  });

  describe('if response contains cookies', function() {

    it('puts them on resp.cookies', function(done) {
      needle.get(
        'localhost:' + ALL_COOKIES_TEST_PORT, function(error, response) {
          response.should.have.property('cookies');
          done();
        });
    });

    it('parses them as a object', function(done) {
      needle.get(
        'localhost:' + ALL_COOKIES_TEST_PORT, function(error, response) {
          response.cookies.should.be.an.instanceOf(Object)
            .and.have.property(WEIRD_COOKIE_NAME);
          response.cookies.should.have.property(BASE64_COOKIE_NAME);
          response.cookies.should.have.property(FORBIDDEN_COOKIE_NAME);
          response.cookies.should.have.property(NUMBER_COOKIE_NAME);
          done();
        });
    });

    it('must decode it', function(done) {
      needle.get(
        'localhost:' + ALL_COOKIES_TEST_PORT, function(error, response) {
          response.cookies.wc.should.be.eql(WEIRD_COOKIE_VALUE);
          response.cookies.bc.should.be.eql(BASE64_COOKIE_VALUE);
          response.cookies.fc.should.be.eql(FORBIDDEN_COOKIE_VALUE);
          response.cookies.nc.should.be.eql(NUMBER_COOKIE_VALUE.toString());
          done();
        });
    });

    describe('when a cookie value is invalid', function() {

      before(function() {
        setCookieHeader = [
          'geo_city=%D1%E0%ED%EA%F2-%CF%E5%F2%E5%F0%E1%F3%F0%E3'
        ];
      })

      it('doesnt blow up', function(done) {
        needle.get('localhost:' + ALL_COOKIES_TEST_PORT, function(error, response) {
          should.not.exist(error)
          var whatever = 'efbfbdefbfbdefbfbdefbfbdefbfbd2defbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbd';
          Buffer.from(response.cookies.geo_city).toString('hex').should.eql(whatever)
          done();
        });
      })

    })

    describe('and response is a redirect', function() {

      var redirectServer, testPort = 22222;
      var requestCookies = [];

      var responseCookies = [
        [ // first req
          WEIRD_COOKIE_NAME + '=' + encode(WEIRD_COOKIE_VALUE) + ';',
          BASE64_COOKIE_NAME + '=' + encode(BASE64_COOKIE_VALUE) + ';',
          'FOO=123;'
        ], [ // second req
          FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE) + ';',
          NUMBER_COOKIE_NAME + '=' + encode(NUMBER_COOKIE_VALUE) + ';'
        ], [ // third red
          'FOO=BAR;'
        ]
      ]

      before(function(done) {
        redirectServer = http.createServer(function(req, res) {
          var number = parseInt(req.url.replace('/', ''));
          var nextUrl = 'http://' + 'localhost:' + testPort + '/' + (number + 1);

          if (number == 0) requestCookies = []; // reset
          requestCookies.push(req.headers['cookie']);

          if (responseCookies[number]) { // we should send cookies for this request
            res.statusCode = 302;
            res.setHeader('Set-Cookie', responseCookies[number]);
            res.setHeader('Location', nextUrl);
          } else if (number == 3) {
            res.statusCode = 302; // redirect but without cookies
            res.setHeader('Location', nextUrl);
          }

          res.end('OK');
        }).listen(22222, done);
      });

      after(function(done) {
        redirectServer.close(done);
      })

      describe('and follow_set_cookies is false', function() {

        describe('with original request cookie', function() {

          var opts = {
            follow_set_cookies: false,
            follow_max: 4,
            cookies: { 'xxx': 123 }
          };

          it('request cookie is not passed to redirects', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              requestCookies.should.eql(["xxx=123", undefined, undefined, undefined, undefined])
              done();
            });
          });

          it('response cookies are not passed either', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              should.not.exist(resp.cookies);
              done();
            });
          });

        })

        describe('without original request cookie', function() {

          var opts = {
            follow_set_cookies: false,
            follow_max: 4,
          };

          it('no request cookies are sent', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              requestCookies.should.eql([undefined, undefined, undefined, undefined, undefined])
              done();
            });
          });

          it('response cookies are not passed either', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              should.not.exist(resp.cookies);
              done();
            });
          });

        })

      });

      describe('and follow_set_cookies is true', function() {

        describe('with original request cookie', function() {

          var opts = {
            follow_set_cookies: true,
            follow_max: 4,
            cookies: { 'xxx': 123 }
          };

          it('request cookie is passed passed to redirects, and response cookies are added too', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              requestCookies.should.eql([
                "xxx=123",
                "xxx=123; wc=!'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=123",
                "xxx=123; wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=123; fc=%20%3B%22%5C%2C; nc=12354342",
                "xxx=123; wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=BAR; fc=%20%3B%22%5C%2C; nc=12354342",
                "xxx=123; wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=BAR; fc=%20%3B%22%5C%2C; nc=12354342"
              ])
              done();
            });
          });

          it('response cookies are passed as well', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              resp.cookies.should.have.property(WEIRD_COOKIE_NAME);
              resp.cookies.should.have.property(BASE64_COOKIE_NAME);
              resp.cookies.should.have.property(FORBIDDEN_COOKIE_NAME);
              resp.cookies.should.have.property(NUMBER_COOKIE_NAME);
              resp.cookies.should.have.property('FOO');
              resp.cookies.FOO.should.eql('BAR'); // should overwrite previous one
              done();
            });
          });

        })

        describe('without original request cookie', function() {

          var opts = {
            follow_set_cookies: true,
            follow_max: 4,
          };

          it('response cookies are passed to redirects', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              requestCookies.should.eql([
                undefined,
                "wc=!'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=123",
                "wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=123; fc=%20%3B%22%5C%2C; nc=12354342",
                "wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=BAR; fc=%20%3B%22%5C%2C; nc=12354342",
                "wc=!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~; bc=Y29va2llCg==; FOO=BAR; fc=%20%3B%22%5C%2C; nc=12354342"
              ])
              done();
            });
          });

          it('response cookies are passed as well', function(done) {
            needle.get('localhost:' + testPort + '/0', opts, function(err, resp) {
              // resp.cookies.should.have.property(WEIRD_COOKIE_NAME);
              // resp.cookies.should.have.property(BASE64_COOKIE_NAME);
              // resp.cookies.should.have.property(FORBIDDEN_COOKIE_NAME);
              // resp.cookies.should.have.property(NUMBER_COOKIE_NAME);
              // resp.cookies.should.have.property('FOO');
              // resp.cookies.FOO.should.eql('BAR'); // should overwrite previous one
              done();
            });
          });

        })

      });

    });

    describe('with parse_cookies = false', function() {
      it('does not parse them', function(done) {
        needle.get(
          'localhost:' + ALL_COOKIES_TEST_PORT, { parse_cookies: false }, function(error, response) {
            should.not.exist(response.cookies);
            done();
          });
      });
    });
  });

  describe('if request contains cookie header', function() {
    var opts = {
      cookies: {}
    };

    before(function() {
      opts.cookies[WEIRD_COOKIE_NAME] = WEIRD_COOKIE_VALUE;
      opts.cookies[BASE64_COOKIE_NAME] = BASE64_COOKIE_VALUE;
      opts.cookies[FORBIDDEN_COOKIE_NAME] = FORBIDDEN_COOKIE_VALUE;
      opts.cookies[NUMBER_COOKIE_NAME] = NUMBER_COOKIE_VALUE;
    });

    it('must be a valid cookie string', function(done) {
      var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/;

      var full_header = [
        WEIRD_COOKIE_NAME + '=' + WEIRD_COOKIE_VALUE,
        BASE64_COOKIE_NAME + '=' + BASE64_COOKIE_VALUE,
        FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE),
        NUMBER_COOKIE_NAME + '=' + NUMBER_COOKIE_VALUE
      ].join('; ')

      needle.get('localhost:' + ALL_COOKIES_TEST_PORT, opts, function(error, response) {
        var cookieString = response.req._headers.cookie;
        cookieString.should.be.type('string');

        cookieString.split(/\s*;\s*/).forEach(function(pair) {
          COOKIE_PAIR.test(pair).should.be.exactly(true);
        });

        cookieString.should.be.exactly(full_header);
        done();
      });
    });

    it('dont have to encode allowed characters', function(done) {
      var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/,
          KEY_INDEX = 1,
          VALUE_INEX = 3;

      needle.get('localhost:' + ALL_COOKIES_TEST_PORT, opts, function(error, response) {
        var cookieObj = {},
            cookieString = response.req._headers.cookie;

        cookieString.split(/\s*;\s*/).forEach(function(str) {
          var pair = COOKIE_PAIR.exec(str);
          cookieObj[pair[KEY_INDEX]] = pair[VALUE_INEX];
        });

        cookieObj[WEIRD_COOKIE_NAME].should.be.exactly(WEIRD_COOKIE_VALUE);
        cookieObj[BASE64_COOKIE_NAME].should.be.exactly(BASE64_COOKIE_VALUE);
        done();
      });
    });

    it('must encode forbidden characters', function(done) {
      var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/,
          KEY_INDEX = 1,
          VALUE_INEX = 3;

      needle.get('localhost:' + ALL_COOKIES_TEST_PORT, opts, function(error, response) {
        var cookieObj = {},
            cookieString = response.req._headers.cookie;

        cookieString.split(/\s*;\s*/).forEach(function(str) {
          var pair = COOKIE_PAIR.exec(str);
          cookieObj[pair[KEY_INDEX]] = pair[VALUE_INEX];
        });

        cookieObj[FORBIDDEN_COOKIE_NAME].should.not.be.eql(
          FORBIDDEN_COOKIE_VALUE);
        cookieObj[FORBIDDEN_COOKIE_NAME].should.be.exactly(
          encode(FORBIDDEN_COOKIE_VALUE));
        cookieObj[FORBIDDEN_COOKIE_NAME].should.be.exactly(
          encodeURIComponent(FORBIDDEN_COOKIE_VALUE));
        done();
      });
    });
  });

});
121
node_modules/needle/test/decoder_spec.js
generated
vendored
@@ -1,121 +0,0 @@
var should = require('should'),
    needle = require('./../'),
    Q = require('q'),
    chardet = require('jschardet'),
    helpers = require('./helpers');

describe('character encoding', function() {

  var url;
  this.timeout(5000);

  describe('Given content-type: "text/html; charset=EUC-JP"', function() {

    before(function() {
      url = 'http://www.nina.jp/server/slackware/webapp/tomcat_charset.html';
    })

    describe('with decode = false', function() {

      it('does not decode', function(done) {

        needle.get(url, { decode: false }, function(err, resp) {
          resp.body.should.be.a.String;
          chardet.detect(resp.body).encoding.should.eql('windows-1252');
          resp.body.indexOf('EUCを使う').should.eql(-1);
          done();
        })

      })

    })

    describe('with decode = true', function() {

      it('decodes', function(done) {

        needle.get(url, { decode: true }, function(err, resp) {
          resp.body.should.be.a.String;
          chardet.detect(resp.body).encoding.should.eql('ascii');
          resp.body.indexOf('EUCを使う').should.not.eql(-1);
          done();
        })

      })

    })

  })

  describe('Given content-type: "text/html but file is charset: gb2312', function() {

    it('encodes to UTF-8', function(done) {

      // Our Needle wrapper that requests a chinese website.
      var task = Q.nbind(needle.get, needle, 'http://www.chinesetop100.com/');

      // Different instantiations of this task
      var tasks = [Q.fcall(task, {decode: true}),
                   Q.fcall(task, {decode: false})];

      var results = tasks.map(function(task) {
        return task.then(function(obj) {
          return obj[0].body;
        });
      });

      // Execute all requests concurrently
      Q.all(results).done(function(bodies) {

        var charsets = [
          chardet.detect(bodies[0]).encoding,
          chardet.detect(bodies[1]).encoding,
        ]

        // We wanted to decode our first stream as specified by options
        charsets[0].should.equal('ascii');
        bodies[0].indexOf('全球中文网站前二十强').should.not.equal(-1);

        // But not our second stream
        charsets[1].should.equal('windows-1252');
        bodies[1].indexOf('全球中文网站前二十强').should.equal(-1);

        done();
      });
    })
  })

  describe('Given content-type: "text/html"', function () {

    var server,
        port = 54321,
        text = 'Magyarországi Fióktelepe'

    before(function(done) {
      server = helpers.server({
        port: port,
        response: text,
        headers: { 'Content-Type': 'text/html' }
      }, done);
    })

    after(function(done) {
      server.close(done)
    })

    describe('with decode = false', function () {
      it('decodes by default to utf-8', function (done) {

        needle.get('http://localhost:' + port, { decode: false }, function (err, resp) {
          resp.body.should.be.a.String;
          chardet.detect(resp.body).encoding.should.eql('ISO-8859-2');
          resp.body.should.eql('Magyarországi Fióktelepe')
          done();
        })

      })

    })

  })
})
272
node_modules/needle/test/errors_spec.js
generated
vendored
@@ -1,272 +0,0 @@
var needle = require('../'),
    sinon = require('sinon'),
    should = require('should'),
    http = require('http'),
    Emitter = require('events').EventEmitter,
    helpers = require('./helpers');

var get_catch = function(url, opts) {
  var err;
  try {
    needle.get(url, opts);
  } catch(e) {
    err = e;
  }
  return err;
}

describe('errors', function() {

  after(function(done) {
    setTimeout(done, 100)
  })

  describe('when host does not exist', function() {

    var url = 'http://unexistinghost/foo';

    describe('with callback', function() {

      it('does not throw', function() {
        var ex = get_catch(url);
        should.not.exist(ex);
      })

      it('callbacks an error', function(done) {
        needle.get(url, function(err) {
          err.should.be.a.Error;
          done();
        })
      })

      it('error should be ENOTFOUND or EADDRINFO or EAI_AGAIN', function(done) {
        needle.get(url, function(err) {
          err.code.should.match(/ENOTFOUND|EADDRINFO|EAI_AGAIN/)
          done();
        })
      })

      it('does not callback a response', function(done) {
        needle.get(url, function(err, resp) {
          should.not.exist(resp);
          done();
        })
      })

      it('does not emit an error event', function(done) {
        var emitted = false;
        var req = needle.get(url, function(err, resp) { })

        req.on('error', function() {
          emitted = true;
        })

        setTimeout(function() {
          emitted.should.eql(false);
          done();
        }, 100);
      })

    })

    describe('without callback', function() {

      it('does not throw', function() {
        var ex = get_catch(url);
        should.not.exist(ex);
      })

      it('emits end event once, with error', function(done) {
        var callcount = 0,
            stream = needle.get(url);

        stream.on('done', function(err) {
          err.code.should.match(/ENOTFOUND|EADDRINFO|EAI_AGAIN/)
          callcount++;
        })

        setTimeout(function() {
          callcount.should.equal(1);
          done();
        }, 200)
      })

      it('does not emit a readable event', function(done) {
        var called = false,
            stream = needle.get(url);

        stream.on('readable', function() {
          called = true;
        })

        stream.on('done', function(err) {
          called.should.be.false;
          done();
        })
      })

      it('does not emit an error event', function(done) {
        var emitted = false,
            stream = needle.get(url);

        stream.on('error', function() {
          emitted = true;
        })

        stream.on('done', function(err) {
          emitted.should.eql(false);
          done();
        })
      })

    })

  })

  describe('when request times out waiting for response', function() {

    var server,
        url = 'http://localhost:3333/foo';

    var send_request = function(cb) {
      return needle.get(url, { response_timeout: 200 }, cb);
    }

    before(function() {
      server = helpers.server({ port: 3333, wait: 1000 });
    })

    after(function() {
      server.close();
    })

    describe('with callback', function() {

      it('aborts the request', function(done) {

        var time = new Date();

        send_request(function(err) {
          var timediff = (new Date() - time);
          timediff.should.be.within(200, 300);
          done();
        })

      })

      it('callbacks an error', function(done) {
        send_request(function(err) {
          err.should.be.a.Error;
          done();
        })
      })

      it('error should be ECONNRESET', function(done) {
        send_request(function(err) {
          err.code.should.equal('ECONNRESET')
          done();
        })
      })

      it('does not callback a response', function(done) {
        send_request(function(err, resp) {
          should.not.exist(resp);
          done();
        })
      })

      it('does not emit an error event', function(done) {
        var emitted = false;

        var req = send_request(function(err, resp) {
          should.not.exist(resp);
        })

        req.on('error', function() {
          emitted = true;
        })

        setTimeout(function() {
          emitted.should.eql(false);
          done();
        }, 350);
      })

    })

    describe('without callback', function() {

      it('emits done event once, with error', function(done) {
        var error,
            called = 0,
            stream = send_request();

        stream.on('done', function(err) {
          err.code.should.equal('ECONNRESET');
          called++;
        })

        setTimeout(function() {
          called.should.equal(1);
          done();
        }, 250)
      })

      it('aborts the request', function(done) {

        var time = new Date();
        var stream = send_request();

        stream.on('done', function(err) {
          var timediff = (new Date() - time);
          timediff.should.be.within(200, 300);
          done();
        })

      })

      it('error should be ECONNRESET', function(done) {
        var error,
            stream = send_request();

        stream.on('done', function(err) {
          err.code.should.equal('ECONNRESET')
          done();
        })
      })

      it('does not emit a readable event', function(done) {
        var called = false,
            stream = send_request();

        stream.on('readable', function() {
          called = true;
        })

        stream.on('done', function(err) {
          called.should.be.false;
          done();
        })
      })

      it('does not emit an error event', function(done) {
        var emitted = false;
        var stream = send_request();

        stream.on('error', function() {
          emitted = true;
        })

        stream.on('done', function(err) {
          err.should.be.a.Error;
          err.code.should.equal('ECONNRESET')
          emitted.should.eql(false);
          done();
        })
      })

    })

  })

})
203
node_modules/needle/test/headers_spec.js
generated
vendored
@@ -1,203 +0,0 @@
var http = require('http'),
    helpers = require('./helpers'),
    should = require('should');

var port = 54321;

describe('request headers', function() {

  var needle,
      server,
      existing_sockets,
      original_defaultMaxSockets;

  before(function(done) {
    setTimeout(function() {
      existing_sockets = get_active_sockets().length;
      server = helpers.server({ port: port }, done);
    }, 100);
  })

  after(function(done) {
    server.close(done);
  })

  function send_request(opts, cb) {
    needle.get('http://localhost:' + port, opts, cb);
  }

  function get_active_sockets() {
    var handles = process._getActiveHandles();

    return handles.filter(function(el) {
      if (el.constructor.name.toString() == 'Socket') {
        return el.destroyed !== true;
      }
    })
  }

  describe('old node versions (<0.11.4) with persistent keep-alive connections', function() {

    before(function() {
      delete require.cache[require.resolve('..')] // in case it was already loaded
      original_defaultMaxSockets = http.Agent.defaultMaxSockets;
      http.Agent.defaultMaxSockets = 5;
      needle = require('..');
    })

    after(function() {
      http.Agent.defaultMaxSockets = original_defaultMaxSockets;
      delete require.cache[require.resolve('..')]
    })

    describe('default options', function() {

      it('sends a Connection: close header', function(done) {
        send_request({}, function(err, resp) {
          resp.body.headers['connection'].should.eql('close');
          done();
        })
      })

      it('no open sockets remain after request', function(done) {
        send_request({}, function(err, resp) {
          setTimeout(function() {
            get_active_sockets().length.should.eql(existing_sockets);
            done();
          }, 10)
        });
      })

    })

    describe('passing connection: close', function() {

      it('sends a Connection: close header', function(done) {
        send_request({ connection: 'close' }, function(err, resp) {
          resp.body.headers['connection'].should.eql('close');
          done();
        })
      })

      it('no open sockets remain after request', function(done) {
        send_request({ connection: 'close' }, function(err, resp) {
          setTimeout(function() {
            get_active_sockets().length.should.eql(existing_sockets);
            done();
          }, 10)
        });
      })

    })

    describe('passing connection: keep-alive', function() {

      it('sends a Connection: keep-alive header (using options.headers.connection)', function(done) {
        send_request({ headers: { connection: 'keep-alive' }}, function(err, resp) {
          resp.body.headers['connection'].should.eql('keep-alive');
          done();
        })
      })

      it('sends a Connection: keep-alive header (using options.connection)', function(done) {
        send_request({ connection: 'keep-alive' }, function(err, resp) {
          resp.body.headers['connection'].should.eql('keep-alive');
          done();
        })
      })

      it('one open socket remain after request', function(done) {
        send_request({ connection: 'keep-alive' }, function(err, resp) {
          get_active_sockets().length.should.eql(existing_sockets + 1);
          done();
        });
      })

    })

  })

  describe('new node versions with smarter connection disposing', function() {

    before(function() {
      delete require.cache[require.resolve('..')]
      original_defaultMaxSockets = http.Agent.defaultMaxSockets;
      http.Agent.defaultMaxSockets = Infinity;
      needle = require('..');
    })

    after(function() {
      http.Agent.defaultMaxSockets = original_defaultMaxSockets;
      delete require.cache[require.resolve('..')]
    })

    describe('default options', function() {

      // TODO:
      // this is weird. by default, new node versions set a 'close' header
      // while older versions set a keep-alive header

      it.skip('sets a Connection header', function(done) {
        send_request({}, function(err, resp) {
          // should.not.exist(resp.body.headers['connection']);
          // done();
        })
      })

      it.skip('one open sockets remain after request', function(done) {
        send_request({}, function(err, resp) {
          // get_active_sockets().length.should.eql(1);
          // done();
        });
      })

    })

    describe('passing connection: close', function() {

      it('sends a Connection: close header', function(done) {
        send_request({ connection: 'close' }, function(err, resp) {
          resp.body.headers['connection'].should.eql('close');
          done();
        })
      })

      it('no open sockets remain after request', function(done) {
        send_request({ connection: 'close' }, function(err, resp) {
          setTimeout(function() {
            get_active_sockets().length.should.eql(existing_sockets);
            done();
          }, 10);
        });
      })

    })

    describe('passing connection: keep-alive', function() {

      it('sends a Connection: keep-alive header (using options.headers.connection)', function(done) {
        send_request({ headers: { connection: 'keep-alive' }}, function(err, resp) {
          resp.body.headers['connection'].should.eql('keep-alive');
          done();
        })
      })

      it('sends a Connection: keep-alive header (using options.connection)', function(done) {
        send_request({ connection: 'keep-alive' }, function(err, resp) {
          resp.body.headers['connection'].should.eql('keep-alive');
          done();
        })
      })

      it('one open socket remain after request', function(done) {
        send_request({ connection: 'keep-alive' }, function(err, resp) {
          get_active_sockets().length.should.eql(existing_sockets + 1);
          done();
        });
      })

    })

  })

})
74
node_modules/needle/test/helpers.js
generated
vendored
@@ -1,74 +0,0 @@
var fs = require('fs');

var protocols = {
  http : require('http'),
  https : require('https')
}

var keys = {
  cert : fs.readFileSync(__dirname + '/keys/ssl.cert'),
  key : fs.readFileSync(__dirname + '/keys/ssl.key')
}

var helpers = {};

helpers.server = function(opts, cb) {

  var defaults = {
    code : 200,
    headers : {'Content-Type': 'application/json'}
  }

  var mirror_response = function(req) {
    return JSON.stringify({
      headers: req.headers,
      body: req.body
    })
  }

  var get = function(what) {
    if (!opts[what])
      return defaults[what];

    if (typeof opts[what] == 'function')
      return opts[what](); // set them at runtime
    else
      return opts[what];
  }

  var finish = function(req, res) {
    if (opts.handler) return opts.handler(req, res);

    res.writeHead(get('code'), get('headers'));
    res.end(opts.response || mirror_response(req));
  }

  var handler = function(req, res) {

    req.setEncoding('utf8'); // get as string
    req.body = '';
    req.on('data', function(str) { req.body += str })
    req.socket.on('error', function(e) {
      // res.writeHead(500, {'Content-Type': 'text/plain'});
      // res.end('Error: ' + e.message);
    })

    setTimeout(function(){
      finish(req, res);
    }, opts.wait || 0);

  };

  var protocol = opts.protocol || 'http';
  var server;

  if (protocol == 'https')
    server = protocols[protocol].createServer(keys, handler);
  else
    server = protocols[protocol].createServer(handler);

  server.listen(opts.port, cb);
  return server;
}

module.exports = helpers;
Some files were not shown because too many files have changed in this diff