module.exports =
/******/ (function (modules, runtime) { // webpackBootstrap
/******/ 	"use strict";
/******/ 	// The module cache: moduleId -> { i: id, l: loaded, exports }
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if (installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId,
/******/ 			l: false,
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function; evict the cache entry if it throws
/******/ 		// so a later require can retry instead of seeing half-built exports.
/******/ 		var threw = true;
/******/ 		try {
/******/ 			modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ 			threw = false;
/******/ 		} finally {
/******/ 			if (threw) delete installedModules[moduleId];
/******/ 		}
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/ 	// Absolute base directory used by bundled code to resolve assets
/******/ 	__webpack_require__.ab = __dirname + "/";
/******/
/******/ 	// the startup function
/******/ 	function startup() {
/******/ 		// Load entry module and return exports
/******/ 		return __webpack_require__(109);
/******/ 	}
/******/
/******/ 	// run startup
/******/ 	return startup();
/******/ })
/************************************************************************/
2020-08-17 18:35:15 +02:00
/******/ ( {
2020-08-23 03:31:38 +02:00
/***/ 8 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = _ _webpack _require _ _ ( 828 )
const intersects = ( r1 , r2 , options ) => {
r1 = new Range ( r1 , options )
r2 = new Range ( r2 , options )
return r1 . intersects ( r2 )
}
module . exports = intersects
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 10 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
module . exports = globSync
globSync . GlobSync = GlobSync
var fs = _ _webpack _require _ _ ( 747 )
2020-10-19 21:17:06 +02:00
var rp = _ _webpack _require _ _ ( 290 )
2020-09-02 10:07:11 +02:00
var minimatch = _ _webpack _require _ _ ( 973 )
var Minimatch = minimatch . Minimatch
var Glob = _ _webpack _require _ _ ( 957 ) . Glob
var util = _ _webpack _require _ _ ( 669 )
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 714 )
var common = _ _webpack _require _ _ ( 625 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
function globSync ( pattern , options ) {
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
return new GlobSync ( pattern , options ) . found
2020-08-23 03:31:38 +02:00
}
// Synchronous Glob walker: processes every compiled pattern set
// eagerly in the constructor, leaving results in this.found.
function GlobSync (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n' +
                        'See: https://github.com/isaacs/node-glob/issues/167')

  if (!(this instanceof GlobSync))
    return new GlobSync(pattern, options)

  setopts(this, pattern, options)

  if (this.noprocess)
    return this

  var n = this.minimatch.set.length
  this.matches = new Array(n)
  for (var i = 0; i < n; i++) {
    this._process(this.minimatch.set[i], i, false)
  }
  this._finish()
}
// Resolve matches to realpaths (when options.realpath is set) and
// finalize this.found via common.finish.
GlobSync.prototype._finish = function () {
  assert(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          // ENOENT-style stat failures keep the un-resolved path;
          // anything else is a real error.
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}
// Process one compiled pattern set (array of literal strings, regexes,
// and GLOBSTAR markers) against the filesystem.
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
  assert(this instanceof GlobSync)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n++
  }
  // now n is the index of the first one that is *not* a string.

  // See if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  // if ignored, skip processing
  if (childrenIgnored(this, read))
    return

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}
// Read a directory and match its entries against the head of `remain`,
// then either emit matches (last segment) or recurse.
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix)
      newPattern = [prefix, e]
    else
      newPattern = [e]
    this._process(newPattern.concat(remain), index, inGlobStar)
  }
}
// Record a single match for pattern-set `index`, honoring the ignore,
// mark, absolute and nodir options; stats the match when `stat` is set.
GlobSync.prototype._emitMatch = function (index, e) {
  if (isIgnored(this, e))
    return

  var abs = this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute) {
    e = abs
  }

  // de-dupe: already recorded for this pattern set
  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    // 'DIR' or an entries array both mean "is a directory"
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  if (this.stat)
    this._stat(e)
}
// readdir used while expanding **: lstat first so symlinked directories
// are recorded in this.symlinks (and, unless `follow`, not recursed into).
GlobSync.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries
  var lstat
  var stat
  try {
    lstat = fs.lstatSync(abs)
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
  }

  var isSym = lstat && lstat.isSymbolicLink()
  this.symlinks[abs] = isSym

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE'
  else
    entries = this._readdir(abs, false)

  return entries
}
// Cached readdir; returns entry names, or null when `abs` is not a
// readable directory.
GlobSync.prototype._readdir = function (abs, inGlobStar) {
  var entries

  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, fs.readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er)
    return null
  }
}
// Cache a successful readdir result and mark each child as existing.
GlobSync.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  // mark and cache dir-ness (an entries array means "directory")
  this.cache[abs] = entries

  return entries
}
// Classify readdir failures and cache the result; rethrows in strict
// mode or when the cwd itself is invalid.
GlobSync.prototype._readdirError = function (f, er) {
  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er)
      break
  }
}
// Expand a ** segment: match with the globstar removed, then recurse
// into every non-dot child both below and replacing the globstar.
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {

  var entries = this._readdir(abs, inGlobStar)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [prefix] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false)

  var len = entries.length
  var isSym = this.symlinks[abs]

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true)
  }
}
// Handle a pattern with no magic: a single literal path to stat.
GlobSync.prototype._processSimple = function (prefix, index) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var exists = this._stat(prefix)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
}
// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
  var abs = this._makeAbs(f)
  // a trailing slash means the caller needs this to be a directory
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return false

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (!stat) {
    var lstat
    try {
      lstat = fs.lstatSync(abs)
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        this.statCache[abs] = false
        return false
      }
    }

    // for symlinks, stat the target; fall back to the lstat result
    // when the link is broken
    if (lstat && lstat.isSymbolicLink()) {
      try {
        stat = fs.statSync(abs)
      } catch (er) {
        stat = lstat
      }
    } else {
      stat = lstat
    }
  }

  this.statCache[abs] = stat

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'

  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return false

  return c
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
GlobSync . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
GlobSync . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 16 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const parse = _ _webpack _require _ _ ( 925 )
const prerelease = ( version , options ) => {
const parsed = parse ( version , options )
return ( parsed && parsed . prerelease . length ) ? parsed . prerelease : null
}
module . exports = prerelease
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 17 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const neq = ( a , b , loose ) => compare ( a , b , loose ) !== 0
module . exports = neq
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 30 :
2020-08-16 00:36:41 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
2020-08-21 13:39:42 +02:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2020-09-02 10:07:11 +02:00
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
2020-08-21 13:39:42 +02:00
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-08-16 00:36:41 +02:00
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-02 10:07:11 +02:00
exports . getOctokitOptions = exports . GitHub = exports . context = void 0 ;
const Context = _ _importStar ( _ _webpack _require _ _ ( 53 ) ) ;
const Utils = _ _importStar ( _ _webpack _require _ _ ( 914 ) ) ;
// octokit + plugins
const core _1 = _ _webpack _require _ _ ( 762 ) ;
const plugin _rest _endpoint _methods _1 = _ _webpack _require _ _ ( 44 ) ;
2020-10-23 18:21:44 +02:00
const plugin _paginate _rest _1 = _ _webpack _require _ _ ( 193 ) ;
2020-09-02 10:07:11 +02:00
exports . context = new Context . Context ( ) ;
const baseUrl = Utils . getApiBaseUrl ( ) ;
const defaults = {
baseUrl ,
request : {
agent : Utils . getProxyAgent ( baseUrl )
}
} ;
exports . GitHub = core _1 . Octokit . plugin ( plugin _rest _endpoint _methods _1 . restEndpointMethods , plugin _paginate _rest _1 . paginateRest ) . defaults ( defaults ) ;
/ * *
* Convience function to correctly format Octokit Options to pass into the constructor .
*
* @ param token the repo PAT or GITHUB _TOKEN
* @ param options other options to set
* /
function getOctokitOptions ( token , options ) {
const opts = Object . assign ( { } , options || { } ) ; // Shallow clone - don't mutate the object provided by the caller
// Auth
const auth = Utils . getAuthString ( token , opts ) ;
if ( auth ) {
opts . auth = auth ;
}
return opts ;
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
exports . getOctokitOptions = getOctokitOptions ;
//# sourceMappingURL=utils.js.map
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 38 :
/***/ ( function ( module ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
2020-08-21 13:39:42 +02:00
// True when `o`'s [[Class]] tag is exactly "Object".
function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}

// True only for plain objects: literals, `new Object`, or
// `Object.create(null)` — not class instances, arrays, etc.
function isPlainObject(o) {
  var ctor, prot;

  if (isObject(o) === false) return false;

  // If has modified constructor
  ctor = o.constructor;
  if (ctor === undefined) return true;

  // If has modified prototype
  prot = ctor.prototype;
  if (isObject(prot) === false) return false;

  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}
2020-09-02 10:07:11 +02:00
module . exports = isPlainObject ;
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 44 :
/***/ ( function ( _ _unusedmodule , exports ) {
2020-08-21 13:39:42 +02:00
"use strict" ;
2020-09-02 10:07:11 +02:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
const Endpoints = {
actions : {
addSelectedRepoToOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
cancelWorkflowRun : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ] ,
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
createOrUpdateRepoSecret : [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
createRegistrationTokenForOrg : [ "POST /orgs/{org}/actions/runners/registration-token" ] ,
createRegistrationTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ] ,
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
createRemoveTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ] ,
createWorkflowDispatch : [ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" ] ,
deleteArtifact : [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
deleteRepoSecret : [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
deleteSelfHostedRunnerFromOrg : [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ] ,
deleteSelfHostedRunnerFromRepo : [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
deleteWorkflowRun : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
deleteWorkflowRunLogs : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
downloadArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ] ,
downloadJobLogsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ] ,
downloadWorkflowRunLogs : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
getJobForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
getSelfHostedRunnerForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
getWorkflowRunUsage : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ] ,
getWorkflowUsage : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ] ,
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
listJobsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ] ,
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
listRunnerApplicationsForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ] ,
listSelectedReposForOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ] ,
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
listWorkflowRunArtifacts : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ] ,
listWorkflowRuns : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ] ,
listWorkflowRunsForRepo : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
removeSelectedRepoFromOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
setSelectedReposForOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ]
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
deleteThreadSubscription : [ "DELETE /notifications/threads/{thread_id}/subscription" ] ,
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
getThreadSubscriptionForAuthenticatedUser : [ "GET /notifications/threads/{thread_id}/subscription" ] ,
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
listOrgEventsForAuthenticatedUser : [ "GET /users/{username}/events/orgs/{org}" ] ,
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
listReceivedPublicEventsForUser : [ "GET /users/{username}/received_events/public" ] ,
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
listRepoNotificationsForAuthenticatedUser : [ "GET /repos/{owner}/{repo}/notifications" ] ,
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
setThreadSubscription : [ "PUT /notifications/threads/{thread_id}/subscription" ] ,
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
addRepoToInstallation : [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
checkToken : [ "POST /applications/{client_id}/token" ] ,
createContentAttachment : [ "POST /content_references/{content_reference_id}/attachments" , {
mediaType : {
previews : [ "corsair" ]
}
} ] ,
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
createInstallationAccessToken : [ "POST /app/installations/{installation_id}/access_tokens" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getBySlug : [ "GET /apps/{app_slug}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getInstallation : [ "GET /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getSubscriptionPlanForAccount : [ "GET /marketplace_listing/accounts/{account_id}" ] ,
getSubscriptionPlanForAccountStubbed : [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ] ,
getUserInstallation : [ "GET /users/{username}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
listAccountsForPlanStubbed : [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ] ,
listInstallationReposForAuthenticatedUser : [ "GET /user/installations/{installation_id}/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallations : [ "GET /app/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listReposAccessibleToInstallation : [ "GET /installation/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
listSubscriptionsForAuthenticatedUserStubbed : [ "GET /user/marketplace_purchases/stubbed" ] ,
removeRepoFromInstallation : [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationAccessToken : [ "DELETE /installation/token" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
unsuspendInstallation : [ "DELETE /app/installations/{installation_id}/suspended" ]
} ,
billing : {
getGithubActionsBillingOrg : [ "GET /orgs/{org}/settings/billing/actions" ] ,
getGithubActionsBillingUser : [ "GET /users/{username}/settings/billing/actions" ] ,
getGithubPackagesBillingOrg : [ "GET /orgs/{org}/settings/billing/packages" ] ,
getGithubPackagesBillingUser : [ "GET /users/{username}/settings/billing/packages" ] ,
getSharedStorageBillingOrg : [ "GET /orgs/{org}/settings/billing/shared-storage" ] ,
getSharedStorageBillingUser : [ "GET /users/{username}/settings/billing/shared-storage" ]
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listAnnotations : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listForSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
rerequestSuite : [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
setSuitesPreferences : [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ]
} ,
codeScanning : {
getAlert : [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getConductCode : [ "GET /codes_of_conduct/{key}" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/community/code_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ]
} ,
emojis : {
get : [ "GET /emojis" ]
} ,
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getAllTemplates : [ "GET /gitignore/templates" ] ,
getTemplate : [ "GET /gitignore/templates/{name}" ]
} ,
interactions : {
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
removeRestrictionsForRepo : [ "DELETE /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
setRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
setRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ]
} ,
issues : {
addAssignees : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
createComment : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
deleteComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
deleteMilestone : [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
listEventsForTimeline : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" , {
mediaType : {
previews : [ "mockingbird" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
listLabelsForMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ] ,
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
listLabelsOnIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
removeAllLabels : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
removeAssignees : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
removeLabel : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ] ,
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
updateMilestone : [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ]
} ,
licenses : {
get : [ "GET /licenses/{license}" ] ,
getAllCommonlyUsed : [ "GET /licenses" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
renderRaw : [ "POST /markdown/raw" , {
headers : {
"content-type" : "text/plain; charset=utf-8"
}
} ]
} ,
meta : {
get : [ "GET /meta" ]
} ,
migrations : {
cancelImport : [ "DELETE /repos/{owner}/{repo}/import" ] ,
deleteArchiveForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
deleteArchiveForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
downloadArchiveForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getArchiveForAuthenticatedUser : [ "GET /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getCommitAuthors : [ "GET /repos/{owner}/{repo}/import/authors" ] ,
getImportStatus : [ "GET /repos/{owner}/{repo}/import" ] ,
getLargeFiles : [ "GET /repos/{owner}/{repo}/import/large_files" ] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForUser : [ "GET /user/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
mapCommitAuthor : [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ] ,
setLfsPreference : [ "PATCH /repos/{owner}/{repo}/import/lfs" ] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [ "PUT /repos/{owner}/{repo}/import" ] ,
unlockRepoForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
unlockRepoForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
updateImport : [ "PATCH /repos/{owner}/{repo}/import" ]
} ,
orgs : {
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembershipForUser : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembershipForUser : [ "GET /orgs/{org}/public_members/{username}" ] ,
convertMemberToOutsideCollaborator : [ "PUT /orgs/{org}/outside_collaborators/{username}" ] ,
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
createWebhook : [ "POST /orgs/{org}/hooks" ] ,
deleteWebhook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
get : [ "GET /orgs/{org}" ] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
getMembershipForUser : [ "GET /orgs/{org}/memberships/{username}" ] ,
getWebhook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
list : [ "GET /organizations" ] ,
listAppInstallations : [ "GET /orgs/{org}/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMembershipsForAuthenticatedUser : [ "GET /user/memberships/orgs" ] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
listWebhooks : [ "GET /orgs/{org}/hooks" ] ,
pingWebhook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembershipForUser : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
removeOutsideCollaborator : [ "DELETE /orgs/{org}/outside_collaborators/{username}" ] ,
removePublicMembershipForAuthenticatedUser : [ "DELETE /orgs/{org}/public_members/{username}" ] ,
setMembershipForUser : [ "PUT /orgs/{org}/memberships/{username}" ] ,
setPublicMembershipForAuthenticatedUser : [ "PUT /orgs/{org}/public_members/{username}" ] ,
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
updateMembershipForAuthenticatedUser : [ "PATCH /user/memberships/orgs/{org}" ] ,
updateWebhook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ]
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createCard : [ "POST /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createColumn : [ "POST /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForAuthenticatedUser : [ "POST /user/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForOrg : [ "POST /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForRepo : [ "POST /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
delete : [ "DELETE /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteColumn : [ "DELETE /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
get : [ "GET /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getCard : [ "GET /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getColumn : [ "GET /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getPermissionForUser : [ "GET /projects/{project_id}/collaborators/{username}/permission" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCards : [ "GET /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCollaborators : [ "GET /projects/{project_id}/collaborators" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listColumns : [ "GET /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForUser : [ "GET /users/{username}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveColumn : [ "POST /projects/columns/{column_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
removeCollaborator : [ "DELETE /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
update : [ "PATCH /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateCard : [ "PATCH /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateColumn : [ "PATCH /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ]
} ,
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
createReplyForReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ] ,
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
createReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
deletePendingReview : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
deleteReviewComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
dismissReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ] ,
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
getReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
getReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
listCommentsForReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
listRequestedReviewers : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
listReviewComments : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
listReviewCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
removeRequestedReviewers : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
requestReviewers : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
submitReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ] ,
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
updateBranch : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" , {
mediaType : {
previews : [ "lydian" ]
}
} ] ,
updateReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
updateReviewComment : [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ]
} ,
rateLimit : {
get : [ "GET /rate_limit" ]
} ,
reactions : {
createForCommitComment : [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssue : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssueComment : [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForPullRequestReviewComment : [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssue : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssueComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForPullRequestComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussion : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussionComment : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteLegacy : [ "DELETE /reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} , {
deprecated : "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy"
} ] ,
listForCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssueComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForPullRequestReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ]
} ,
repos : {
acceptInvitation : [ "PATCH /user/repository_invitations/{invitation_id}" ] ,
addAppAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
addStatusCheckContexts : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
addTeamAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
addUserAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
checkVulnerabilityAlerts : [ "GET /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
createCommitComment : [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
createCommitSignatureProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
createCommitStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
createDeploymentStatus : [ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
createOrUpdateFileContents : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
createPagesSite : [ "POST /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
createUsingTemplate : [ "POST /repos/{template_owner}/{template_repo}/generate" , {
mediaType : {
previews : [ "baptiste" ]
}
} ] ,
createWebhook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
declineInvitation : [ "DELETE /user/repository_invitations/{invitation_id}" ] ,
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
deleteAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
deleteAdminBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
deleteBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
deleteCommitSignatureProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
deleteDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
deleteDeployment : [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
deleteInvitation : [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
deletePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
deletePullRequestReviewProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
deleteReleaseAsset : [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
deleteWebhook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
disableAutomatedSecurityFixes : [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
disableVulnerabilityAlerts : [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
downloadArchive : [ "GET /repos/{owner}/{repo}/{archive_format}/{ref}" ] ,
enableAutomatedSecurityFixes : [ "PUT /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
enableVulnerabilityAlerts : [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}" ] ,
getAccessRestrictions : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
getAdminBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
getAllStatusCheckContexts : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ] ,
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
getAppsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
getBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
getCollaboratorPermissionLevel : [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ] ,
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
getCommitSignatureProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
getCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" , {
mediaType : {
previews : [ "black-panther" ]
}
} ] ,
getContent : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
getDeploymentStatus : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ] ,
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
getPullRequestReviewProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
getStatusChecksProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
getTeamsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ] ,
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
getUsersWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ] ,
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
getWebhook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
listBranchesForHeadCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
listCommentsForCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
listCommitCommentsForRepo : [ "GET /repos/{owner}/{repo}/comments" ] ,
listCommitStatusesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
listDeploymentStatuses : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listPublic : [ "GET /repositories" ] ,
listPullRequestsAssociatedWithCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listReleaseAssets : [ "GET /repos/{owner}/{repo}/releases/{release_id}/assets" ] ,
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
listWebhooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
pingWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
removeAppAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
removeCollaborator : [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ] ,
removeStatusCheckContexts : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
removeStatusCheckProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
removeTeamAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
removeUserAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
requestPagesBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
setAdminBranchProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
setAppAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
setStatusCheckContexts : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
setTeamAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
setUserAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
testPushWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
updateBranchProtection : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
updateInvitation : [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
updatePullRequestReviewProtection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
updateReleaseAsset : [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
updateStatusCheckPotection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
updateWebhook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
uploadReleaseAsset : [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" , {
baseUrl : "https://uploads.github.com"
} ]
} ,
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" , {
mediaType : {
previews : [ "cloak" ]
}
} ] ,
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
users : [ "GET /search/users" ]
} ,
teams : {
addOrUpdateMembershipForUserInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
addOrUpdateProjectPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
addOrUpdateRepoPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
checkPermissionsForProjectInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
checkPermissionsForRepoInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
create : [ "POST /orgs/{org}/teams" ] ,
createDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
deleteDiscussionCommentInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
deleteDiscussionInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
getDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
getDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
getMembershipForUserInOrg : [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
listDiscussionCommentsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
listPendingInvitationsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/invitations" ] ,
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
removeMembershipForUserInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
removeProjectInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ] ,
removeRepoInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
updateDiscussionCommentInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
updateDiscussionInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
addEmailForAuthenticated : [ "POST /user/emails" ] ,
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
checkPersonIsFollowedByAuthenticated : [ "GET /user/following/{username}" ] ,
createGpgKeyForAuthenticated : [ "POST /user/gpg_keys" ] ,
createPublicSshKeyForAuthenticated : [ "POST /user/keys" ] ,
deleteEmailForAuthenticated : [ "DELETE /user/emails" ] ,
deleteGpgKeyForAuthenticated : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
deletePublicSshKeyForAuthenticated : [ "DELETE /user/keys/{key_id}" ] ,
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
getGpgKeyForAuthenticated : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
getPublicSshKeyForAuthenticated : [ "GET /user/keys/{key_id}" ] ,
list : [ "GET /users" ] ,
listBlockedByAuthenticated : [ "GET /user/blocks" ] ,
listEmailsForAuthenticated : [ "GET /user/emails" ] ,
listFollowedByAuthenticated : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
listGpgKeysForAuthenticated : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
listPublicEmailsForAuthenticated : [ "GET /user/public_emails" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
listPublicSshKeysForAuthenticated : [ "GET /user/keys" ] ,
setPrimaryEmailVisibilityForAuthenticated : [ "PATCH /user/email/visibility" ] ,
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
2020-08-21 13:39:42 +02:00
} ;
2020-09-02 10:07:11 +02:00
const VERSION = "4.1.3" ;
/**
 * Turn the static route map (`{ scope: { methodName: [route, defaults?, decorations?] } }`)
 * into callable request methods grouped by scope.
 *
 * Fix: stray VCS timestamp lines that had been embedded mid-function (syntax
 * errors in JavaScript) are removed; the logic is otherwise unchanged.
 *
 * @param {object} octokit - client exposing `request.defaults()`
 * @param {object} endpointsMap - scope -> methodName -> endpoint tuple
 * @returns {object} scope -> methodName -> request function
 */
function endpointsToMethods(octokit, endpointsMap) {
  const newMethods = {};
  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      // Tuple shape: [ "VERB /path", defaults?, decorations? ]
      const [route, defaults, decorations] = endpoint;
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);
      if (!newMethods[scope]) {
        newMethods[scope] = {};
      }
      const scopeMethods = newMethods[scope];
      if (decorations) {
        // Decorated endpoints (mapToData, renamed, deprecated, ...) get a wrapper.
        scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
        continue;
      }
      scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
    }
  }
  return newMethods;
}
/**
 * Wrap a request method with endpoint "decorations": `mapToData` (move a named
 * parameter into the request body), `renamed` / `deprecated` (log warnings),
 * and `renamedParameters` (warn and alias old parameter names).
 *
 * Fix: stray VCS timestamp lines embedded mid-function (syntax errors) removed;
 * logic is otherwise unchanged.
 *
 * @param {object} octokit - client exposing `request.defaults()` and `log.warn()`
 * @param {string} scope - API scope, used in warning messages
 * @param {string} methodName - method name, used in warning messages
 * @param {object} defaults - endpoint defaults (method, url, ...)
 * @param {object} decorations - decoration flags described above
 * @returns {Function} decorated request function (also carries the defaults-request's own props)
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  /* istanbul ignore next */
  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
    if (decorations.mapToData) {
      // Move the named parameter into `data` and clear the original key.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        if (name in options) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
          if (!(alias in options)) {
            options[alias] = options[name];
          }
          delete options[name];
        }
      }
      return requestWithDefaults(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }
  // Preserve the underlying request function's own properties (e.g. `.endpoint`).
  return Object.assign(withDecorations, requestWithDefaults);
}
/ * *
* This plugin is a 1 : 1 copy of internal @ octokit / rest plugins . The primary
* goal is to rebuild @ octokit / rest on top of @ octokit / core . Once that is
* done , we will remove the registerEndpoints methods and return the methods
* directly as with the other plugins . At that point we will also remove the
* legacy workarounds and deprecations .
*
* See the plan at
* https : //github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
* /
/**
 * Octokit plugin entry point: materialize all REST endpoint methods
 * (from the module-level `Endpoints` map) for the given client instance.
 *
 * @param {object} octokit - the Octokit client being extended
 * @returns {object} scope -> methodName -> request function
 */
function restEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit, Endpoints);
  return api;
}
restEndpointMethods . VERSION = VERSION ;
exports . restEndpointMethods = restEndpointMethods ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 53 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . Context = void 0 ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const os _1 = _ _webpack _require _ _ ( 87 ) ;
/**
 * GitHub Actions workflow run context, hydrated from the runner environment
 * (GITHUB_* environment variables plus the webhook event payload file).
 *
 * Fix: stray VCS timestamp lines embedded throughout the class body (syntax
 * errors in JavaScript) removed; logic is otherwise unchanged.
 */
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                // A missing event file is a warning (written to stdout), not a failure.
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
        this.job = process.env.GITHUB_JOB;
        this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
        this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
    }
    /**
     * Owner/repo plus the issue (or pull request) number taken from the
     * event payload. Falls back to `payload.number` when neither an `issue`
     * nor a `pull_request` key is present.
     */
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    /**
     * Owner and repo, from GITHUB_REPOSITORY ("owner/repo") if set, otherwise
     * from the event payload's `repository` object.
     * @throws {Error} when neither source is available
     */
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports . Context = Context ;
//# sourceMappingURL=context.js.map
/***/ } ) ,
/***/ 55 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = _ _webpack _require _ _ ( 828 )
// Report whether `version` lies inside `range`. A range that fails to parse
// is treated as matching nothing, so this never throws on bad range input.
const satisfies = (version, range, options) => {
  let parsedRange;
  try {
    parsedRange = new Range(range, options);
  } catch (err) {
    // Unparsable range: simply no match.
    return false;
  }
  return parsedRange.test(version);
};
module . exports = satisfies
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 88 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const debug = _ _webpack _require _ _ ( 427 )
const { MAX _LENGTH , MAX _SAFE _INTEGER } = _ _webpack _require _ _ ( 293 )
const { re , t } = _ _webpack _require _ _ ( 523 )
const { compareIdentifiers } = _ _webpack _require _ _ ( 463 )
/**
 * node-semver's SemVer class: parses a version string into numeric
 * major/minor/patch plus prerelease and build identifier arrays, and supports
 * comparison and in-place incrementing.
 *
 * Fix: stray VCS timestamp lines embedded throughout the class body (syntax
 * errors in JavaScript) removed; logic is otherwise unchanged, token for token.
 */
class SemVer {
  /**
   * @param {string|SemVer} version - version string, or an existing instance
   * @param {object|boolean} options - `{ loose, includePrerelease }`; a bare
   *   boolean is interpreted as `loose` for backwards compatibility
   * @throws {TypeError} on non-string input, over-long input, or parse failure
   */
  constructor (version, options) {
    if (!options || typeof options !== 'object') {
      options = {
        loose: !!options,
        includePrerelease: false
      }
    }
    if (version instanceof SemVer) {
      // Reuse the instance when the parse options match; otherwise reparse its string.
      if (version.loose === !!options.loose &&
          version.includePrerelease === !!options.includePrerelease) {
        return version
      } else {
        version = version.version
      }
    } else if (typeof version !== 'string') {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    if (version.length > MAX_LENGTH) {
      throw new TypeError(
        `version is longer than ${MAX_LENGTH} characters`
      )
    }
    debug('SemVer', version, options)
    this.options = options
    this.loose = !!options.loose
    // this isn't actually relevant for versions, but keep it so that we
    // don't run into trouble passing this.options around.
    this.includePrerelease = !!options.includePrerelease
    const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
    if (!m) {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    this.raw = version
    // these are actually numbers
    this.major = +m[1]
    this.minor = +m[2]
    this.patch = +m[3]
    if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
      throw new TypeError('Invalid major version')
    }
    if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
      throw new TypeError('Invalid minor version')
    }
    if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
      throw new TypeError('Invalid patch version')
    }
    // numberify any prerelease numeric ids
    if (!m[4]) {
      this.prerelease = []
    } else {
      this.prerelease = m[4].split('.').map((id) => {
        if (/^[0-9]+$/.test(id)) {
          const num = +id
          if (num >= 0 && num < MAX_SAFE_INTEGER) {
            return num
          }
        }
        return id
      })
    }
    this.build = m[5] ? m[5].split('.') : []
    this.format()
  }

  // Recompute and return `this.version` ("x.y.z" plus "-pre" if any).
  format () {
    this.version = `${this.major}.${this.minor}.${this.patch}`
    if (this.prerelease.length) {
      this.version += `-${this.prerelease.join('.')}`
    }
    return this.version
  }

  toString () {
    return this.version
  }

  // Full comparison: main version, then prerelease. Returns -1 / 0 / 1.
  compare (other) {
    debug('SemVer.compare', this.version, this.options, other)
    if (!(other instanceof SemVer)) {
      if (typeof other === 'string' && other === this.version) {
        return 0
      }
      other = new SemVer(other, this.options)
    }
    if (other.version === this.version) {
      return 0
    }
    return this.compareMain(other) || this.comparePre(other)
  }

  // Compare only major/minor/patch.
  compareMain (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    return (
      compareIdentifiers(this.major, other.major) ||
      compareIdentifiers(this.minor, other.minor) ||
      compareIdentifiers(this.patch, other.patch)
    )
  }

  // Compare only the prerelease identifier lists.
  comparePre (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    // NOT having a prerelease is > having one
    if (this.prerelease.length && !other.prerelease.length) {
      return -1
    } else if (!this.prerelease.length && other.prerelease.length) {
      return 1
    } else if (!this.prerelease.length && !other.prerelease.length) {
      return 0
    }
    let i = 0
    do {
      const a = this.prerelease[i]
      const b = other.prerelease[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }

  // Compare only the build identifier lists (same algorithm as comparePre).
  compareBuild (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    let i = 0
    do {
      const a = this.build[i]
      const b = other.build[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }

  // preminor will bump the version up to the next minor release, and immediately
  // down to pre-release. premajor and prepatch work the same way.
  inc (release, identifier) {
    switch (release) {
      case 'premajor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor = 0
        this.major++
        this.inc('pre', identifier)
        break
      case 'preminor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor++
        this.inc('pre', identifier)
        break
      case 'prepatch':
        // If this is already a prerelease, it will bump to the next version
        // drop any prereleases that might already exist, since they are not
        // relevant at this point.
        this.prerelease.length = 0
        this.inc('patch', identifier)
        this.inc('pre', identifier)
        break
      // If the input is a non-prerelease version, this acts the same as
      // prepatch.
      case 'prerelease':
        if (this.prerelease.length === 0) {
          this.inc('patch', identifier)
        }
        this.inc('pre', identifier)
        break
      case 'major':
        // If this is a pre-major version, bump up to the same major version.
        // Otherwise increment major.
        // 1.0.0-5 bumps to 1.0.0
        // 1.1.0 bumps to 2.0.0
        if (
          this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0
        ) {
          this.major++
        }
        this.minor = 0
        this.patch = 0
        this.prerelease = []
        break
      case 'minor':
        // If this is a pre-minor version, bump up to the same minor version.
        // Otherwise increment minor.
        // 1.2.0-5 bumps to 1.2.0
        // 1.2.1 bumps to 1.3.0
        if (this.patch !== 0 || this.prerelease.length === 0) {
          this.minor++
        }
        this.patch = 0
        this.prerelease = []
        break
      case 'patch':
        // If this is not a pre-release version, it will increment the patch.
        // If it is a pre-release it will bump up to the same patch version.
        // 1.2.0-5 patches to 1.2.0
        // 1.2.0 patches to 1.2.1
        if (this.prerelease.length === 0) {
          this.patch++
        }
        this.prerelease = []
        break
      // This probably shouldn't be used publicly.
      // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
      case 'pre':
        if (this.prerelease.length === 0) {
          this.prerelease = [0]
        } else {
          let i = this.prerelease.length
          while (--i >= 0) {
            if (typeof this.prerelease[i] === 'number') {
              this.prerelease[i]++
              i = -2
            }
          }
          if (i === -1) {
            // didn't increment anything
            this.prerelease.push(0)
          }
        }
        if (identifier) {
          // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
          // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
          if (this.prerelease[0] === identifier) {
            if (isNaN(this.prerelease[1])) {
              this.prerelease = [identifier, 0]
            }
          } else {
            this.prerelease = [identifier, 0]
          }
        }
        break
      default:
        throw new Error(`invalid increment argument: ${release}`)
    }
    this.format()
    this.raw = this.version
    return this
  }
}
module . exports = SemVer
/***/ } ) ,
/***/ 98 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const eq = _ _webpack _require _ _ ( 898 )
const neq = _ _webpack _require _ _ ( 17 )
const gt = _ _webpack _require _ _ ( 123 )
const gte = _ _webpack _require _ _ ( 522 )
const lt = _ _webpack _require _ _ ( 194 )
const lte = _ _webpack _require _ _ ( 520 )
// Dispatch a comparison operator string to the matching semver predicate.
// '===' / '!==' compare version strings for exact identity (SemVer instances
// are reduced to their `.version` string); all other operators delegate to
// the semantic comparison helpers. Unknown operators throw a TypeError.
const cmp = (a, op, b, loose) => {
  const asString = (x) => (typeof x === 'object' ? x.version : x);
  switch (op) {
    case '===':
      return asString(a) === asString(b);
    case '!==':
      return asString(a) !== asString(b);
    case '':
    case '=':
    case '==':
      return eq(a, b, loose);
    case '!=':
      return neq(a, b, loose);
    case '>':
      return gt(a, b, loose);
    case '>=':
      return gte(a, b, loose);
    case '<':
      return lt(a, b, loose);
    case '<=':
      return lte(a, b, loose);
    default:
      throw new TypeError(`Invalid operator: ${op}`);
  }
};
module . exports = cmp
/***/ } ) ,
/***/ 109 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript emit helper: re-export property `k` of module `m` on `o` (as `k2`),
// using a live getter when `Object.create` is available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript emit helper: attach a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
// TypeScript emit helper: emulate `import * as ns` over a CommonJS module —
// ES modules pass through unchanged, others get their own properties copied
// plus a `default` binding.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// TypeScript emit helper: drive a downleveled async function (a generator)
// to completion, resolving the returned promise with its final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const buildx = _ _importStar ( _ _webpack _require _ _ ( 295 ) ) ;
const context = _ _importStar ( _ _webpack _require _ _ ( 842 ) ) ;
2020-10-21 21:07:52 +02:00
const exec = _ _importStar ( _ _webpack _require _ _ ( 757 ) ) ;
2020-09-02 10:07:11 +02:00
const stateHelper = _ _importStar ( _ _webpack _require _ _ ( 647 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 186 ) ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
if ( os . platform ( ) !== 'linux' ) {
2020-10-21 21:07:52 +02:00
throw new Error ( ` Only supported on linux platform ` ) ;
2020-09-02 10:07:11 +02:00
}
if ( ! ( yield buildx . isAvailable ( ) ) ) {
2020-10-21 21:07:52 +02:00
throw new Error ( ` Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx. ` ) ;
2020-09-02 10:07:11 +02:00
}
2020-10-19 21:17:06 +02:00
stateHelper . setTmpDir ( context . tmpDir ( ) ) ;
2020-09-02 10:07:11 +02:00
const buildxVersion = yield buildx . getVersion ( ) ;
core . info ( ` 📣 Buildx version: ${ buildxVersion } ` ) ;
2020-10-19 21:17:06 +02:00
const defContext = context . defaultContext ( ) ;
let inputs = yield context . getInputs ( defContext ) ;
2020-09-02 10:07:11 +02:00
core . info ( ` 🏃 Starting build... ` ) ;
2020-10-19 21:17:06 +02:00
const args = yield context . getArgs ( inputs , defContext , buildxVersion ) ;
2020-10-21 21:07:52 +02:00
yield exec . exec ( 'docker' , args ) . then ( res => {
if ( res . stderr != '' && ! res . success ) {
throw new Error ( ` buildx call failed with: ${ res . stderr . match ( /(.*)\s*$/ ) [ 0 ] } ` ) ;
}
} ) ;
2020-09-02 10:07:11 +02:00
const imageID = yield buildx . getImageID ( ) ;
if ( imageID ) {
core . info ( '🛒 Extracting digest...' ) ;
core . info ( ` ${ imageID } ` ) ;
core . setOutput ( 'digest' , imageID ) ;
}
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
catch ( error ) {
core . setFailed ( error . message ) ;
}
} ) ;
}
function cleanup ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( stateHelper . tmpDir . length > 0 ) {
core . info ( ` 🚿 Removing temp folder ${ stateHelper . tmpDir } ` ) ;
fs . rmdirSync ( stateHelper . tmpDir , { recursive : true } ) ;
}
} ) ;
}
if ( ! stateHelper . IsPost ) {
run ( ) ;
}
else {
cleanup ( ) ;
}
//# sourceMappingURL=main.js.map
/***/ } ) ,
/***/ 123 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const gt = ( a , b , loose ) => compare ( a , b , loose ) > 0
module . exports = gt
/***/ } ) ,
/***/ 124 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
try {
var util = _ _webpack _require _ _ ( 669 ) ;
/* istanbul ignore next */
if ( typeof util . inherits !== 'function' ) throw '' ;
module . exports = util . inherits ;
} catch ( e ) {
/* istanbul ignore next */
module . exports = _ _webpack _require _ _ ( 544 ) ;
}
/***/ } ) ,
/***/ 129 :
/***/ ( function ( module ) {
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 156 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const compareBuild = ( a , b , loose ) => {
const versionA = new SemVer ( a , loose )
const versionB = new SemVer ( b , loose )
return versionA . compare ( versionB ) || versionA . compareBuild ( versionB )
}
module . exports = compareBuild
/***/ } ) ,
/***/ 159 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const events = _ _importStar ( _ _webpack _require _ _ ( 614 ) ) ;
const child = _ _importStar ( _ _webpack _require _ _ ( 129 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const io = _ _importStar ( _ _webpack _require _ _ ( 436 ) ) ;
const ioUtil = _ _importStar ( _ _webpack _require _ _ ( 962 ) ) ;
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
/**
 * Runs a command line tool, handling quoting and argument parsing in a
 * platform-agnostic way. Emits 'debug' via options.listeners.
 */
class ToolRunner extends events.EventEmitter {
    /**
     * @param toolPath  path to the tool to run; must be non-empty
     * @param args      optional argument array
     * @param options   optional ExecOptions
     */
    constructor(toolPath, args, options) {
        super();
        if (!toolPath) {
            throw new Error("Parameter 'toolPath' cannot be null or empty.");
        }
        this.toolPath = toolPath;
        this.args = args || [];
        this.options = options || {};
    }
    // Forward a debug message to the optional debug listener.
    _debug(message) {
        if (this.options.listeners && this.options.listeners.debug) {
            this.options.listeners.debug(message);
        }
    }
    // Build the display string echoed to the output stream before execution.
    _getCommandString(options, noPrefix) {
        const toolPath = this._getSpawnFileName();
        const args = this._getSpawnArgs(options);
        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
        if (IS_WINDOWS) {
            // Windows + cmd file
            if (this._isCmdFile()) {
                cmd += toolPath;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows + verbatim
            else if (options.windowsVerbatimArguments) {
                cmd += `"${toolPath}"`;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows (regular)
            else {
                cmd += this._windowsQuoteCmdArg(toolPath);
                for (const a of args) {
                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
                }
            }
        }
        else {
            // OSX/Linux - this can likely be improved with some form of quoting.
            // creating processes on Unix is fundamentally different than Windows.
            // on Unix, execvp() takes an arg array.
            cmd += toolPath;
            for (const a of args) {
                cmd += ` ${a}`;
            }
        }
        return cmd;
    }
    // Split buffered stream data into complete lines and hand each to onLine.
    // NOTE(review): strBuffer is a string parameter, so the trailing partial
    // line assigned to it at the end is not visible to the caller — confirm
    // against upstream @actions/exec intent.
    _processLineBuffer(data, strBuffer, onLine) {
        try {
            let s = strBuffer + data.toString();
            let n = s.indexOf(os.EOL);
            while (n > -1) {
                const line = s.substring(0, n);
                onLine(line);
                // the rest of the string ...
                s = s.substring(n + os.EOL.length);
                n = s.indexOf(os.EOL);
            }
            strBuffer = s;
        }
        catch (err) {
            // streaming lines to console is best effort. Don't fail a build.
            this._debug(`error processing line. Failed with error ${err}`);
        }
    }
    // File actually passed to spawn(): cmd.exe for .cmd/.bat files on Windows.
    _getSpawnFileName() {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                return process.env['COMSPEC'] || 'cmd.exe';
            }
        }
        return this.toolPath;
    }
    // Args actually passed to spawn(): a single `/D /S /C "..."` line for cmd files.
    _getSpawnArgs(options) {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
                for (const a of this.args) {
                    argline += ' ';
                    argline += options.windowsVerbatimArguments
                        ? a
                        : this._windowsQuoteCmdArg(a);
                }
                argline += '"';
                return [argline];
            }
        }
        return this.args;
    }
    _endsWith(str, end) {
        return str.endsWith(end);
    }
    _isCmdFile() {
        const upperToolPath = this.toolPath.toUpperCase();
        return (this._endsWith(upperToolPath, '.CMD') ||
            this._endsWith(upperToolPath, '.BAT'));
    }
    _windowsQuoteCmdArg(arg) {
        // for .exe, apply the normal quoting rules that libuv applies
        if (!this._isCmdFile()) {
            return this._uvQuoteCmdArg(arg);
        }
        // otherwise apply quoting rules specific to the cmd.exe command line parser.
        // for a detailed description of the cmd.exe command line parser, refer to
        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
        // need quotes for empty arg
        if (!arg) {
            return '""';
        }
        // determine whether the arg needs to be quoted
        const cmdSpecialChars = [
            ' ',
            '\t',
            '&',
            '(',
            ')',
            '[',
            ']',
            '{',
            '}',
            '^',
            '=',
            ';',
            '!',
            "'",
            '+',
            ',',
            '`',
            '~',
            '|',
            '<',
            '>',
            '"'
        ];
        let needsQuotes = false;
        for (const char of arg) {
            if (cmdSpecialChars.some(x => x === char)) {
                needsQuotes = true;
                break;
            }
        }
        // short-circuit if quotes not needed
        if (!needsQuotes) {
            return arg;
        }
        // Quoting rules (similar to libuv's, adapted for the cmd.exe parser):
        //   1) wrap the string in quotes
        //   2) double-up quotes: " => "" (libuv's \" does not survive cmd.exe)
        //   3) double-up slashes that precede a quote, e.g.
        //        hello\"world  => "hello\\""world"
        //        hello world\  => "hello world\\"
        // Known weakness: % is not escaped; it cannot be escaped on the command
        // line directly (only within a .cmd file, via %%). On the command line
        // %var% is left as-is when var is not defined, which is the saving grace.
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\'; // double the slash
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '"'; // double the quote
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    _uvQuoteCmdArg(arg) {
        // Tool runner wraps child_process.spawn() and needs to apply the same
        // quoting as Node in certain cases where the undocumented spawn option
        // windowsVerbatimArguments is used. This is a port of quote_cmd_arg
        // from Node 4.x (technically libuv, see
        // https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c).
        //
        // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
        // Provided under the MIT license; see the Node.js source above for the
        // full permission notice and warranty disclaimer.
        if (!arg) {
            // Need double quotation for empty argument
            return '""';
        }
        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
            // No quotation needed
            return arg;
        }
        if (!arg.includes('"') && !arg.includes('\\')) {
            // No embedded double quotes or backslashes, so I can just wrap
            // quote marks around the whole thing.
            return `"${arg}"`;
        }
        // Expected input/output:
        //   input : hello"world    output: "hello\"world"
        //   input : hello""world   output: "hello\"\"world"
        //   input : hello\world    output: hello\world
        //   input : hello\"world   output: "hello\\\"world"
        //   input : hello\\"world  output: "hello\\\\\"world"
        //   input : hello world\   output: "hello world\\"
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\';
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '\\';
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Produce a fully-populated copy of ExecOptions with defaults applied.
    _cloneExecOptions(options) {
        options = options || {};
        const result = {
            cwd: options.cwd || process.cwd(),
            env: options.env || process.env,
            silent: options.silent || false,
            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
            failOnStdErr: options.failOnStdErr || false,
            ignoreReturnCode: options.ignoreReturnCode || false,
            delay: options.delay || 10000
        };
        result.outStream = options.outStream || process.stdout;
        result.errStream = options.errStream || process.stderr;
        return result;
    }
    // Options object handed to child_process.spawn().
    _getSpawnOptions(options, toolPath) {
        options = options || {};
        const result = {};
        result.cwd = options.cwd;
        result.env = options.env;
        result['windowsVerbatimArguments'] =
            options.windowsVerbatimArguments || this._isCmdFile();
        if (options.windowsVerbatimArguments) {
            result.argv0 = `"${toolPath}"`;
        }
        return result;
    }
    /**
     * Exec a tool.
     * Output will be streamed to the live console.
     * Returns promise with return code
     *
     * @param     tool     path to tool to exec
     * @param     options  optional exec options.  See ExecOptions
     * @returns   number
     */
    exec() {
        return __awaiter(this, void 0, void 0, function* () {
            // root the tool path if it is unrooted and contains relative pathing
            if (!ioUtil.isRooted(this.toolPath) &&
                (this.toolPath.includes('/') ||
                    (IS_WINDOWS && this.toolPath.includes('\\')))) {
                // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
                this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
            }
            // if the tool is only a file name, then resolve it from the PATH
            // otherwise verify it exists (add extension on Windows if necessary)
            this.toolPath = yield io.which(this.toolPath, true);
            return new Promise((resolve, reject) => {
                this._debug(`exec tool: ${this.toolPath}`);
                this._debug('arguments:');
                for (const arg of this.args) {
                    this._debug(`   ${arg}`);
                }
                const optionsNonNull = this._cloneExecOptions(this.options);
                if (!optionsNonNull.silent && optionsNonNull.outStream) {
                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
                }
                const state = new ExecState(optionsNonNull, this.toolPath);
                state.on('debug', (message) => {
                    this._debug(message);
                });
                const fileName = this._getSpawnFileName();
                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
                const stdbuffer = '';
                if (cp.stdout) {
                    cp.stdout.on('data', (data) => {
                        if (this.options.listeners && this.options.listeners.stdout) {
                            this.options.listeners.stdout(data);
                        }
                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
                            optionsNonNull.outStream.write(data);
                        }
                        this._processLineBuffer(data, stdbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.stdline) {
                                this.options.listeners.stdline(line);
                            }
                        });
                    });
                }
                const errbuffer = '';
                if (cp.stderr) {
                    cp.stderr.on('data', (data) => {
                        state.processStderr = true;
                        if (this.options.listeners && this.options.listeners.stderr) {
                            this.options.listeners.stderr(data);
                        }
                        if (!optionsNonNull.silent &&
                            optionsNonNull.errStream &&
                            optionsNonNull.outStream) {
                            // route stderr to errStream only when it will fail the build
                            const s = optionsNonNull.failOnStdErr
                                ? optionsNonNull.errStream
                                : optionsNonNull.outStream;
                            s.write(data);
                        }
                        this._processLineBuffer(data, errbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.errline) {
                                this.options.listeners.errline(line);
                            }
                        });
                    });
                }
                cp.on('error', (err) => {
                    state.processError = err.message;
                    state.processExited = true;
                    state.processClosed = true;
                    state.CheckComplete();
                });
                cp.on('exit', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                cp.on('close', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    state.processClosed = true;
                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                state.on('done', (error, exitCode) => {
                    if (stdbuffer.length > 0) {
                        this.emit('stdline', stdbuffer);
                    }
                    if (errbuffer.length > 0) {
                        this.emit('errline', errbuffer);
                    }
                    cp.removeAllListeners();
                    if (error) {
                        reject(error);
                    }
                    else {
                        resolve(exitCode);
                    }
                });
                if (this.options.input) {
                    if (!cp.stdin) {
                        throw new Error('child process missing stdin');
                    }
                    cp.stdin.end(this.options.input);
                }
            });
        });
    }
}
exports.ToolRunner = ToolRunner;
/**
 * Convert an arg string to an array of args. Handles escaping.
 *
 * Rules: double quotes group characters (and are stripped); inside quotes a
 * backslash escapes only a double quote — before any other character the
 * backslash is kept literally; unquoted spaces separate args.
 *
 * @param   argString   string of arguments
 * @returns string[]    array of arguments
 */
function argStringToArray(argString) {
    const args = [];
    let inQuotes = false;
    let escaped = false;
    let arg = '';
    function append(c) {
        // we only escape double quotes.
        if (escaped && c !== '"') {
            arg += '\\';
        }
        arg += c;
        escaped = false;
    }
    for (let i = 0; i < argString.length; i++) {
        const c = argString.charAt(i);
        if (c === '"') {
            if (!escaped) {
                inQuotes = !inQuotes;
            }
            else {
                append(c);
            }
            continue;
        }
        if (c === '\\' && escaped) {
            append(c);
            continue;
        }
        if (c === '\\' && inQuotes) {
            escaped = true;
            continue;
        }
        if (c === ' ' && !inQuotes) {
            if (arg.length > 0) {
                args.push(arg);
                arg = '';
            }
            continue;
        }
        append(c);
    }
    if (arg.length > 0) {
        args.push(arg.trim());
    }
    return args;
}
exports.argStringToArray = argStringToArray;
// Tracks the lifecycle of a spawned process and decides when the overall exec
// is "done": process exited AND stdio closed, or the delay timeout elapsed.
class ExecState extends events.EventEmitter {
    constructor(options, toolPath) {
        super();
        this.processClosed = false; // tracks whether the process has exited and stdio is closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // tracks whether the process has exited
        this.processStderr = false; // tracks whether stderr was written to
        this.delay = 10000; // 10 seconds
        this.done = false;
        this.timeout = null;
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    CheckComplete() {
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
        }
        else if (this.processExited) {
            // stdio may be inherited by a grandchild; allow `delay` ms to close
            this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    _debug(message) {
        this.emit('debug', message);
    }
    _setResult() {
        // determine whether there is an error
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        // clear the timeout
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,
/***/ 179 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
const gt = _ _webpack _require _ _ ( 123 )
const minVersion = ( range , loose ) => {
range = new Range ( range , loose )
let minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( let i = 0 ; i < range . set . length ; ++ i ) {
const comparators = range . set [ i ]
comparators . forEach ( ( comparator ) => {
// Clone to avoid manipulating the comparator's semver object.
const compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( ` Unexpected operation: ${ comparator . operator } ` )
}
} )
}
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
module . exports = minVersion
/***/ } ) ,
/***/ 185 :
2020-09-29 01:19:32 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
2020-09-29 01:19:32 +02:00
"use strict" ;
// For internal use, subject to change.
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const utils _1 = _ _webpack _require _ _ ( 278 ) ;
function issueCommand ( command , message ) {
const filePath = process . env [ ` GITHUB_ ${ command } ` ] ;
if ( ! filePath ) {
throw new Error ( ` Unable to find environment variable for file command ${ command } ` ) ;
}
if ( ! fs . existsSync ( filePath ) ) {
throw new Error ( ` Missing file at path: ${ filePath } ` ) ;
}
fs . appendFileSync ( filePath , ` ${ utils _1 . toCommandValue ( message ) } ${ os . EOL } ` , {
encoding : 'utf8'
} ) ;
}
exports . issueCommand = issueCommand ;
//# sourceMappingURL=file-command.js.map
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 186 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 351 ) ;
2020-09-29 01:19:32 +02:00
const file _command _1 = _ _webpack _require _ _ ( 185 ) ;
const utils _1 = _ _webpack _require _ _ ( 278 ) ;
2020-09-02 10:07:11 +02:00
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    // Newer runners expose GITHUB_ENV; fall back to the legacy set-env
    // workflow command when the file command is unavailable.
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        const delimiter = '_GitHubActionsFileCommandDelimeter_';
        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
        file_command_1.issueCommand('ENV', commandValue);
    }
    else {
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
function addPath(inputPath) {
    // Prefer the GITHUB_PATH file command; fall back to the legacy add-path
    // workflow command on older runners.
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        file_command_1.issueCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.  The value is also trimmed.
 *
 * @param     name     name of the input to get
 * @param     options  optional. See InputOptions.
 * @returns   string
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
exports.getInput = getInput;
/**
 * Sets the value of an output.
 *
 * @param     name     name of the output to set
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* /
function error ( message ) {
command _1 . issue ( 'error' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds an warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group.
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn() rejects/throws.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
 * Gets the value of a state set by this action's main execution.
 *
 * State is read back from a STATE_<name> environment variable; note the
 * name is used verbatim here (no upper-casing), unlike getInput.
 *
 * @param name name of the state to get
 * @returns string
 */
function getState(name) {
    return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 193 :
2020-10-23 18:21:44 +02:00
/***/ ( function ( _ _unusedmodule , exports ) {
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-10-23 18:21:44 +02:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
2020-09-02 10:07:11 +02:00
2020-10-23 18:21:44 +02:00
const VERSION = "2.3.1" ;
2020-09-02 10:07:11 +02:00
2020-10-23 18:21:44 +02:00
/ * *
* Some “ list ” response that can be paginated have a different response structure
*
* They have a ` total_count ` key in the response ( search also has ` incomplete_results ` ,
* / i n s t a l l a t i o n / r e p o s i t o r i e s a l s o h a s ` r e p o s i t o r y _ s e l e c t i o n ` ) , a s w e l l a s a k e y w i t h
* the list of the items which name varies from endpoint to endpoint .
*
* Octokit normalizes these responses so that paginated results are always returned following
* the same structure . One challenge is that if the list response has only one page , no Link
* header is provided , so this header alone is not sufficient to check wether a response is
* paginated or not .
*
* We check if a "total_count" key is present in the response data , but also make sure that
* a "url" property is not , as the "Get the combined status for a specific ref" endpoint would
* otherwise match : https : //developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
* /
function normalizePaginatedListResponse(response) {
    // Heuristic: paginated "list" responses carry a `total_count` key but
    // no `url` key (the combined-status endpoint has both and must be
    // left untouched). Non-matching responses pass through unchanged.
    const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
    if (!responseNeedsNormalization) return response;
    // Keep the additional properties intact as there is currently no
    // other way to retrieve the same information.
    const incompleteResults = response.data.incomplete_results;
    const repositorySelection = response.data.repository_selection;
    const totalCount = response.data.total_count;
    delete response.data.incomplete_results;
    delete response.data.repository_selection;
    delete response.data.total_count;
    // After removing the bookkeeping keys, the single remaining key holds
    // the actual item list; lift it up to be the response data.
    const namespaceKey = Object.keys(response.data)[0];
    const data = response.data[namespaceKey];
    response.data = data;
    if (typeof incompleteResults !== "undefined") {
        response.data.incomplete_results = incompleteResults;
    }
    if (typeof repositorySelection !== "undefined") {
        response.data.repository_selection = repositorySelection;
    }
    response.data.total_count = totalCount;
    return response;
}
2020-10-23 18:21:44 +02:00
function iterator ( octokit , route , parameters ) {
const options = typeof route === "function" ? route . endpoint ( parameters ) : octokit . request . endpoint ( route , parameters ) ;
const requestMethod = typeof route === "function" ? route : octokit . request ;
const method = options . method ;
const headers = options . headers ;
let url = options . url ;
return {
[ Symbol . asyncIterator ] : ( ) => ( {
next ( ) {
if ( ! url ) {
return Promise . resolve ( {
done : true
} ) ;
}
return requestMethod ( {
method ,
url ,
headers
} ) . then ( normalizePaginatedListResponse ) . then ( response => {
// `response.headers.link` format:
// '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
// sets `url` to undefined if "next" URL is not present or `link` header is not set
url = ( ( response . headers . link || "" ) . match ( /<([^>]+)>;\s*rel="next"/ ) || [ ] ) [ 1 ] ;
return {
value : response
} ;
} ) ;
}
} )
} ;
}
function paginate ( octokit , route , parameters , mapFn ) {
if ( typeof parameters === "function" ) {
mapFn = parameters ;
parameters = undefined ;
}
return gather ( octokit , [ ] , iterator ( octokit , route , parameters ) [ Symbol . asyncIterator ] ( ) , mapFn ) ;
}
function gather(octokit, results, iterator, mapFn) {
    // Recursively drain the async iterator, accumulating page items into
    // `results`. `mapFn` may transform each page and can stop pagination
    // early by invoking the `done` callback it receives.
    return iterator.next().then(step => {
        if (step.done) {
            return results;
        }
        let stop = false;
        const done = () => {
            stop = true;
        };
        const pageItems = mapFn ? mapFn(step.value, done) : step.value.data;
        results = results.concat(pageItems);
        if (stop) {
            return results;
        }
        return gather(octokit, results, iterator, mapFn);
    });
}
/**
 * Octokit plugin entry point: exposes `octokit.paginate(...)` plus
 * `octokit.paginate.iterator(...)`, both bound to this instance.
 *
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
function paginateRest(octokit) {
    return {
        paginate: Object.assign(paginate.bind(null, octokit), {
            iterator: iterator.bind(null, octokit)
        })
    };
}
paginateRest.VERSION = VERSION;
exports.paginateRest = paginateRest;
//# sourceMappingURL=index.js.map
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 194 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)
// semver: true when version `a` orders strictly below `b`.
const lt = (a, b, loose) => compare(a, b, loose) < 0
module.exports = lt
/***/ } ) ,
/***/ 211 :
/***/ ( function ( module ) {
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 219 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var net = _ _webpack _require _ _ ( 631 ) ;
var tls = _ _webpack _require _ _ ( 818 ) ;
var http = _ _webpack _require _ _ ( 605 ) ;
var https = _ _webpack _require _ _ ( 211 ) ;
var events = _ _webpack _require _ _ ( 614 ) ;
var assert = _ _webpack _require _ _ ( 357 ) ;
var util = _ _webpack _require _ _ ( 669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
// Open a CONNECT tunnel to options.host:options.port through the proxy
// and hand the raw socket to `cb`.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  // Reserve a pool slot immediately so concurrent calls respect
  // maxSockets; the placeholder is swapped for the real socket once the
  // CONNECT handshake succeeds.
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated/unsafe `new Buffer(string)`
    // constructor (same behavior for string input).
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade); // for v0.6
  connectReq.once('connect', onConnect); // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      // A successful CONNECT response must not carry a body.
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
// Normalize the two addRequest call signatures into one options object.
function toOptions(host, port, localAddress) {
  // Modern callers (node >= 0.11) already pass an options object.
  if (typeof host !== 'string') {
    return host;
  }
  // Legacy (host, port, localAddress) signature — node 0.10 and earlier.
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
// Shallow-merge every subsequent argument into `target`, skipping
// `undefined` values so explicit settings are never clobbered.
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    // Guard against null explicitly: `typeof null === 'object'`, and
    // Object.keys(null) would throw a TypeError.
    if (overrides && typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}
var debug;
// Verbose logging is enabled only when NODE_DEBUG contains the word
// "tunnel"; otherwise debug() is a no-op.
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
/***/ } ) ,
/***/ 223 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var wrappy = __webpack_require__(940)
// wrappy preserves the wrapped function's name/length on the exports.
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Opt-in prototype patching: calling once.proto() (itself idempotent via
// once) adds .once()/.onceStrict() methods to Function.prototype.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
// Wrap `fn` so it is invoked at most once; later calls return the cached
// first result. Exposes `.called` and `.value` on the wrapper.
function once (fn) {
  var wrapper = function () {
    if (wrapper.called) {
      return wrapper.value
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  wrapper.called = false
  return wrapper
}
// Like `once`, but a second invocation throws instead of silently
// returning the cached value.
function onceStrict (fn) {
  var wrapper = function () {
    if (wrapper.called) {
      throw new Error(wrapper.onceError)
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  var label = fn.name || 'Function wrapped with `once`'
  wrapper.onceError = label + " shouldn't be called more than once"
  wrapper.called = false
  return wrapper
}
/***/ } ) ,
/***/ 234 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault (ex) {
  // Unwrap a transpiled ES-module namespace object to its default
  // export; pass every other value through untouched.
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
var endpoint = _ _webpack _require _ _ ( 440 ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
var isPlainObject = _interopDefault ( _ _webpack _require _ _ ( 886 ) ) ;
var nodeFetch = _interopDefault ( _ _webpack _require _ _ ( 467 ) ) ;
var requestError = _ _webpack _require _ _ ( 537 ) ;
const VERSION = "5.4.7" ;
// Read a fetch Response body as a raw ArrayBuffer (binary payloads).
const getBufferResponse = (response) => response.arrayBuffer();
function fetchWrapper ( requestOptions ) {
if ( isPlainObject ( requestOptions . body ) || Array . isArray ( requestOptions . body ) ) {
requestOptions . body = JSON . stringify ( requestOptions . body ) ;
}
let headers = { } ;
let status ;
let url ;
const fetch = requestOptions . request && requestOptions . request . fetch || nodeFetch ;
return fetch ( requestOptions . url , Object . assign ( {
method : requestOptions . method ,
body : requestOptions . body ,
headers : requestOptions . headers ,
redirect : requestOptions . redirect
} , requestOptions . request ) ) . then ( response => {
url = response . url ;
status = response . status ;
for ( const keyAndValue of response . headers ) {
headers [ keyAndValue [ 0 ] ] = keyAndValue [ 1 ] ;
}
if ( status === 204 || status === 205 ) {
return ;
} // GitHub API returns 200 for HEAD requests
if ( requestOptions . method === "HEAD" ) {
if ( status < 400 ) {
return ;
}
throw new requestError . RequestError ( response . statusText , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status === 304 ) {
throw new requestError . RequestError ( "Not modified" , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status >= 400 ) {
return response . text ( ) . then ( message => {
const error = new requestError . RequestError ( message , status , {
headers ,
request : requestOptions
} ) ;
try {
let responseBody = JSON . parse ( error . message ) ;
Object . assign ( error , responseBody ) ;
let errors = responseBody . errors ; // Assumption `errors` would always be in Array format
error . message = error . message + ": " + errors . map ( JSON . stringify ) . join ( ", " ) ;
} catch ( e ) { // ignore, see octokit/rest.js#684
}
throw error ;
} ) ;
}
const contentType = response . headers . get ( "content-type" ) ;
if ( /application\/json/ . test ( contentType ) ) {
return response . json ( ) ;
}
if ( ! contentType || /^text\/|charset=utf-8$/ . test ( contentType ) ) {
return response . text ( ) ;
}
return getBufferResponse ( response ) ;
} ) . then ( data => {
return {
status ,
url ,
headers ,
data
} ;
} ) . catch ( error => {
if ( error instanceof requestError . RequestError ) {
throw error ;
}
throw new requestError . RequestError ( error . message , 500 , {
headers ,
request : requestOptions
} ) ;
} ) ;
}
function withDefaults ( oldEndpoint , newDefaults ) {
const endpoint = oldEndpoint . defaults ( newDefaults ) ;
const newApi = function ( route , parameters ) {
const endpointOptions = endpoint . merge ( route , parameters ) ;
if ( ! endpointOptions . request || ! endpointOptions . request . hook ) {
return fetchWrapper ( endpoint . parse ( endpointOptions ) ) ;
}
const request = ( route , parameters ) => {
return fetchWrapper ( endpoint . parse ( endpoint . merge ( route , parameters ) ) ) ;
} ;
Object . assign ( request , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
return endpointOptions . request . hook ( request , endpointOptions ) ;
} ;
return Object . assign ( newApi , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
}
const request = withDefaults ( endpoint . endpoint , {
headers : {
"user-agent" : ` octokit-request.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
}
} ) ;
exports . request = request ;
//# sourceMappingURL=index.js.map
2020-09-29 01:19:32 +02:00
/***/ } ) ,
/***/ 278 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    // null and undefined both become the empty string.
    if (input == null) {
        return '';
    }
    // Strings (and String objects) pass through untouched.
    if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    // Everything else is serialized to JSON.
    return JSON.stringify(input);
}
exports . toCommandValue = toCommandValue ;
//# sourceMappingURL=utils.js.map
2020-10-19 21:17:06 +02:00
/***/ } ) ,
/***/ 290 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module.exports = realpath
realpath.realpath = realpath
realpath.sync = realpathSync
realpath.realpathSync = realpathSync
realpath.monkeypatch = monkeypatch
realpath.unmonkeypatch = unmonkeypatch

var fs = __webpack_require__(747)
var origRealpath = fs.realpath
var origRealpathSync = fs.realpathSync

var version = process.version
// On node v0-v5 the built-in realpath is the old JS implementation that
// already handles the edge cases below, so it is used unconditionally.
var ok = /^v[0-5]\./.test(version)
var old = __webpack_require__(734)
// True when `er` is a realpath failure that the old userland fallback
// implementation can handle (symlink loops, huge paths, OOM).
function newError (er) {
  var retryCodes = { ELOOP: true, ENOMEM: true, ENAMETOOLONG: true }
  return er && er.syscall === 'realpath' && retryCodes[er.code] === true
}
function realpath ( p , cache , cb ) {
if ( ok ) {
return origRealpath ( p , cache , cb )
}
if ( typeof cache === 'function' ) {
cb = cache
cache = null
}
origRealpath ( p , cache , function ( er , result ) {
if ( newError ( er ) ) {
old . realpath ( p , cache , cb )
} else {
cb ( er , result )
}
} )
}
function realpathSync ( p , cache ) {
if ( ok ) {
return origRealpathSync ( p , cache )
}
try {
return origRealpathSync ( p , cache )
} catch ( er ) {
if ( newError ( er ) ) {
return old . realpathSync ( p , cache )
} else {
throw er
}
}
}
function monkeypatch ( ) {
fs . realpath = realpath
fs . realpathSync = realpathSync
}
function unmonkeypatch ( ) {
fs . realpath = origRealpath
fs . realpathSync = origRealpathSync
}
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 293 :
/***/ ( function ( module ) {
// Shared numeric limits used by the semver parser.
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
const SEMVER_SPEC_VERSION = '2.0.0'

const MAX_LENGTH = 256
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
const MAX_SAFE_COMPONENT_LENGTH = 16

module.exports = {
  SEMVER_SPEC_VERSION,
  MAX_LENGTH,
  MAX_SAFE_INTEGER,
  MAX_SAFE_COMPONENT_LENGTH
}
/***/ } ) ,
/***/ 294 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Re-export the tunnel agent implementation (webpack module 219).
module.exports = __webpack_require__(219);
/***/ } ) ,
/***/ 295 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript compiler helpers (emitted by tsc for ES-module interop and
// async/await downleveling). Generated code — do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
2020-10-19 22:12:33 +02:00
exports . parseVersion = exports . getVersion = exports . isAvailable = exports . hasGitAuthToken = exports . isLocalOrTarExporter = exports . getSecret = exports . getImageID = exports . getImageIDFile = void 0 ;
2020-09-02 10:07:11 +02:00
const fs _1 = _ _importDefault ( _ _webpack _require _ _ ( 747 ) ) ;
const path _1 = _ _importDefault ( _ _webpack _require _ _ ( 622 ) ) ;
2020-10-20 15:18:02 +02:00
const sync _1 = _ _importDefault ( _ _webpack _require _ _ ( 750 ) ) ;
2020-09-02 10:07:11 +02:00
const semver = _ _importStar ( _ _webpack _require _ _ ( 383 ) ) ;
const context = _ _importStar ( _ _webpack _require _ _ ( 842 ) ) ;
const exec = _ _importStar ( _ _webpack _require _ _ ( 757 ) ) ;
// Path of the file buildx writes the resulting image ID to, normalized
// to forward slashes so it can be passed to `docker buildx --iidfile`
// on any platform. (Extraction artifact lines removed from this body.)
function getImageIDFile() {
    return __awaiter(this, void 0, void 0, function* () {
        return path_1.default.join(context.tmpDir(), 'iidfile').split(path_1.default.sep).join(path_1.default.posix.sep);
    });
}
exports.getImageIDFile = getImageIDFile;
// Read back the image ID that buildx wrote to the iidfile, or undefined
// when no iidfile was produced.
function getImageID() {
    return __awaiter(this, void 0, void 0, function* () {
        const file = yield getImageIDFile();
        return fs_1.default.existsSync(file)
            ? fs_1.default.readFileSync(file, { encoding: 'utf-8' })
            : undefined;
    });
}
exports.getImageID = getImageID;
// Turn a "KEY=value" secret pair into a buildx `id=...,src=...` secret
// spec, writing the value to a temp file.
function getSecret(kvp) {
    return __awaiter(this, void 0, void 0, function* () {
        // Split only on the first '=' so the secret value itself may
        // contain '=' characters.
        const delimiterIndex = kvp.indexOf('=');
        const key = kvp.substring(0, delimiterIndex);
        const value = kvp.substring(delimiterIndex + 1);
        const secretFile = context.tmpNameSync({
            tmpdir: context.tmpDir()
        });
        // writeFileSync is synchronous and returns undefined; the original
        // `yield` on it was a no-op and has been dropped.
        fs_1.default.writeFileSync(secretFile, value);
        return `id=${key},src=${secretFile}`;
    });
}
exports.getSecret = getSecret;
2020-10-19 22:12:33 +02:00
// True when any of the buildx --output entries is a local or tar
// exporter (those write to the client filesystem instead of a registry).
// (Extraction artifact lines removed from this body.)
function isLocalOrTarExporter(outputs) {
    // Parse the outputs as CSV records (one output per line) so quoted
    // values containing commas are handled correctly.
    for (let output of sync_1.default(outputs.join(`\n`), {
        delimiter: ',',
        trim: true,
        columns: false,
        relax_column_count: true
    })) {
        // Local if no type is defined
        // https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43
        if (output.length == 1 && !output[0].startsWith('type=')) {
            return true;
        }
        for (let [key, value] of output.map(chunk => chunk.split('=').map(item => item.trim()))) {
            if (key == 'type' && (value == 'local' || value == 'tar')) {
                return true;
            }
        }
    }
    return false;
}
exports.isLocalOrTarExporter = isLocalOrTarExporter;
// True when one of the build secrets provides a git auth token
// (secret named GIT_AUTH_TOKEN).
function hasGitAuthToken(secrets) {
    return secrets.some(secret => secret.startsWith('GIT_AUTH_TOKEN='));
}
exports . hasGitAuthToken = hasGitAuthToken ;
2020-09-02 10:07:11 +02:00
// Probe for the buildx CLI plugin by running `docker buildx`; the
// command fails when the plugin is not installed.
function isAvailable() {
    return __awaiter(this, void 0, void 0, function* () {
        const res = yield exec.exec(`docker`, ['buildx'], true);
        if (res.stderr != '' && !res.success) {
            return false;
        }
        return res.success;
    });
}
exports.isAvailable = isAvailable;
// Run `docker buildx version` and extract the semver from its output;
// throws with the command's stderr on failure.
function getVersion() {
    return __awaiter(this, void 0, void 0, function* () {
        const res = yield exec.exec(`docker`, ['buildx', 'version'], true);
        if (res.stderr != '' && !res.success) {
            throw new Error(res.stderr);
        }
        return parseVersion(res.stdout);
    });
}
exports.getVersion = getVersion;
// Pull the first whitespace-delimited, optionally v-prefixed dotted
// version number out of `docker buildx version` output and normalize it
// with semver.clean.
function parseVersion(stdout) {
    return __awaiter(this, void 0, void 0, function* () {
        const matches = /\sv?([0-9.]+)/.exec(stdout);
        if (matches == null) {
            throw new Error(`Cannot parse Buildx version`);
        }
        return semver.clean(matches[1]);
    });
}
exports.parseVersion = parseVersion;
//# sourceMappingURL=buildx.js.map
/***/ } ) ,
/***/ 297 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = __webpack_require__(925)
const eq = __webpack_require__(898)

// Which part differs between two versions: 'major'/'minor'/'patch'
// ('pre'-prefixed when either version has a prerelease tag),
// 'prerelease' when only the prerelease part differs, or null when the
// versions compare equal.
const diff = (version1, version2) => {
  if (eq(version1, version2)) {
    return null
  }
  const v1 = parse(version1)
  const v2 = parse(version2)
  const hasPre = v1.prerelease.length || v2.prerelease.length
  const prefix = hasPre ? 'pre' : ''
  const defaultResult = hasPre ? 'prerelease' : ''
  for (const key of ['major', 'minor', 'patch']) {
    if (v1[key] !== v2[key]) {
      return prefix + key
    }
  }
  return defaultResult
}
module.exports = diff
/***/ } ) ,
/***/ 309 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = __webpack_require__(88)
// Standard semver ordering: -1 when a < b, 0 when equal, 1 when a > b.
const compare = (a, b, loose) =>
  new SemVer(a, loose).compare(new SemVer(b, loose))
module.exports = compare
/***/ } ) ,
/***/ 323 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const outside = __webpack_require__(420)
// Determine if version is less than all the versions possible in the range
// (i.e. strictly below the range's lower bound).
const ltr = (version, range, options) => outside(version, range, '<', options)
module.exports = ltr
/***/ } ) ,
/***/ 334 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
// Classify the token and return the authentication object. Three
// dot-separated segments means a JWT (GitHub App), a "v<N>." prefix
// means an installation token, anything else is treated as OAuth.
async function auth(token) {
  let tokenType;
  if (token.split(/\./).length === 3) {
    tokenType = "app";
  } else if (/^v\d+\./.test(token)) {
    tokenType = "installation";
  } else {
    tokenType = "oauth";
  }
  return {
    type: "token",
    token: token,
    tokenType
  };
}
/**
 * Prefix a token for use in the Authorization header: JWTs (three
 * dot-separated segments) get "bearer", everything else gets "token".
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  const isJWT = token.split(/\./).length === 3;
  return isJWT ? `bearer ${token}` : `token ${token}`;
}

// Request hook: merge route + parameters into concrete endpoint options,
// attach the Authorization header, then perform the request.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  endpoint.headers.authorization = withAuthorizationPrefix(token);
  return request(endpoint);
}
const createTokenAuth = function createTokenAuth ( token ) {
if ( ! token ) {
throw new Error ( "[@octokit/auth-token] No token passed to createTokenAuth" ) ;
}
if ( typeof token !== "string" ) {
throw new Error ( "[@octokit/auth-token] Token passed to createTokenAuth is not a string" ) ;
}
token = token . replace ( /^(token|bearer) +/i , "" ) ;
return Object . assign ( auth . bind ( null , token ) , {
hook : hook . bind ( null , token )
} ) ;
} ;
exports . createTokenAuth = createTokenAuth ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 351 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
2020-09-29 01:19:32 +02:00
const utils _1 = _ _webpack _require _ _ ( 278 ) ;
2020-09-02 10:07:11 +02:00
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
function escapeData ( s ) {
2020-09-29 01:19:32 +02:00
return utils _1 . toCommandValue ( s )
2020-09-02 10:07:11 +02:00
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
2020-09-29 01:19:32 +02:00
return utils _1 . toCommandValue ( s )
2020-09-02 10:07:11 +02:00
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 357:
/***/ (function(module) {

// Externalized Node.js built-in: resolved with require at runtime, not bundled.
module.exports = require("assert");

/***/ }),
2020-10-19 21:17:06 +02:00
/***/ 373 :
/***/ ( function ( module ) {
module . exports = require ( "crypto" ) ;
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 380 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Determine if version is greater than all the versions possible in the range.
const outside = _ _webpack _require _ _ ( 420 )
const gtr = ( version , range , options ) => outside ( version , range , '>' , options )
module . exports = gtr
/***/ } ) ,
/***/ 383 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// just pre-load all the stuff that index.js lazily exports
const internalRe = _ _webpack _require _ _ ( 523 )
module . exports = {
re : internalRe . re ,
src : internalRe . src ,
tokens : internalRe . t ,
SEMVER _SPEC _VERSION : _ _webpack _require _ _ ( 293 ) . SEMVER _SPEC _VERSION ,
SemVer : _ _webpack _require _ _ ( 88 ) ,
compareIdentifiers : _ _webpack _require _ _ ( 463 ) . compareIdentifiers ,
rcompareIdentifiers : _ _webpack _require _ _ ( 463 ) . rcompareIdentifiers ,
parse : _ _webpack _require _ _ ( 925 ) ,
valid : _ _webpack _require _ _ ( 601 ) ,
clean : _ _webpack _require _ _ ( 848 ) ,
inc : _ _webpack _require _ _ ( 900 ) ,
diff : _ _webpack _require _ _ ( 297 ) ,
major : _ _webpack _require _ _ ( 688 ) ,
minor : _ _webpack _require _ _ ( 447 ) ,
patch : _ _webpack _require _ _ ( 866 ) ,
prerelease : _ _webpack _require _ _ ( 16 ) ,
compare : _ _webpack _require _ _ ( 309 ) ,
2020-10-19 21:17:06 +02:00
rcompare : _ _webpack _require _ _ ( 417 ) ,
2020-09-02 10:07:11 +02:00
compareLoose : _ _webpack _require _ _ ( 804 ) ,
compareBuild : _ _webpack _require _ _ ( 156 ) ,
sort : _ _webpack _require _ _ ( 426 ) ,
rsort : _ _webpack _require _ _ ( 701 ) ,
gt : _ _webpack _require _ _ ( 123 ) ,
lt : _ _webpack _require _ _ ( 194 ) ,
eq : _ _webpack _require _ _ ( 898 ) ,
neq : _ _webpack _require _ _ ( 17 ) ,
gte : _ _webpack _require _ _ ( 522 ) ,
lte : _ _webpack _require _ _ ( 520 ) ,
cmp : _ _webpack _require _ _ ( 98 ) ,
coerce : _ _webpack _require _ _ ( 466 ) ,
2020-10-23 18:21:44 +02:00
Comparator : _ _webpack _require _ _ ( 532 ) ,
Range : _ _webpack _require _ _ ( 828 ) ,
satisfies : _ _webpack _require _ _ ( 55 ) ,
toComparators : _ _webpack _require _ _ ( 706 ) ,
maxSatisfying : _ _webpack _require _ _ ( 579 ) ,
minSatisfying : _ _webpack _require _ _ ( 832 ) ,
minVersion : _ _webpack _require _ _ ( 179 ) ,
validRange : _ _webpack _require _ _ ( 741 ) ,
outside : _ _webpack _require _ _ ( 420 ) ,
gtr : _ _webpack _require _ _ ( 380 ) ,
ltr : _ _webpack _require _ _ ( 323 ) ,
intersects : _ _webpack _require _ _ ( 8 ) ,
simplifyRange : _ _webpack _require _ _ ( 561 ) ,
subset : _ _webpack _require _ _ ( 863 ) ,
}
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 413:
/***/ (function(module) {

// Externalized Node.js built-in: resolved with require at runtime, not bundled.
module.exports = require("stream");

/***/ }),
/***/ 417 :
2020-10-19 21:17:06 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const rcompare = ( a , b , loose ) => compare ( b , a , loose )
module . exports = rcompare
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 420:
/***/ (function(module, __unusedexports, __webpack_require__) {

const SemVer = __webpack_require__(88)
const Comparator = __webpack_require__(532)
const { ANY } = Comparator
const Range = __webpack_require__(828)
const satisfies = __webpack_require__(55)
const gt = __webpack_require__(123)
const lt = __webpack_require__(194)
const lte = __webpack_require__(520)
const gte = __webpack_require__(522)

// Determine whether `version` lies entirely outside `range` on the side
// given by `hilo`: '>' asks "greater than every version in the range"
// (gtr), '<' asks "less than every version in the range" (ltr).
const outside = (version, range, hilo, options) => {
  version = new SemVer(version, options)
  range = new Range(range, options)

  let gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      // mirror image of the '>' case: every comparison helper is flipped
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (let i = 0; i < range.set.length; ++i) {
    const comparators = range.set[i]

    let high = null
    let low = null

    // locate the highest and lowest comparators of this comparator set
    comparators.forEach((comparator) => {
      if (comparator.semver === ANY) {
        // a bare '*' comparator behaves like '>=0.0.0' for edge detection
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  // version cleared every comparator set: it is outside the range
  return true
}

module.exports = outside

/***/ }),
/***/ 426 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compareBuild = _ _webpack _require _ _ ( 156 )
const sort = ( list , loose ) => list . sort ( ( a , b ) => compareBuild ( a , b , loose ) )
module . exports = sort
/***/ } ) ,
/***/ 427 :
/***/ ( function ( module ) {
const debug = (
typeof process === 'object' &&
process . env &&
process . env . NODE _DEBUG &&
/\bsemver\b/i . test ( process . env . NODE _DEBUG )
) ? ( ... args ) => console . error ( 'SEMVER' , ... args )
: ( ) => { }
module . exports = debug
/***/ } ) ,
/***/ 429 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function getUserAgent ( ) {
if ( typeof navigator === "object" && "userAgent" in navigator ) {
return navigator . userAgent ;
}
if ( typeof process === "object" && "version" in process ) {
return ` Node.js/ ${ process . version . substr ( 1 ) } ( ${ process . platform } ; ${ process . arch } ) ` ;
}
return "<environment undetectable>" ;
}
exports . getUserAgent = getUserAgent ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 436:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript-generated async helper: drives a generator so each `yield`
// awaits its value, returning a Promise for the whole body.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = __webpack_require__(129);
const path = __webpack_require__(622);
const util_1 = __webpack_require__(669);
const ioUtil = __webpack_require__(962);
// promisified child_process.exec, used by rmRF for Windows shell deletes
const exec = util_1.promisify(childProcess.exec);
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param source    source path
 * @param dest      destination path
 * @param options   optional. See CopyOptions.
 */
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        const { force, recursive } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory()
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
exports.cp = cp;
/**
 * Moves a path.
 *
 * @param source    source path
 * @param dest      destination path
 * @param options   optional. See MoveOptions.
 */
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                // force defaults to true when unspecified (null/undefined)
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        // ensure the parent directory exists, then rename into place
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
exports.mv = mv;
/**
 * Remove a path recursively with force
 *
 * @param inputPath path to remove
 */
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (ioUtil.IS_WINDOWS) {
            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
            // NOTE(review): inputPath is interpolated into a shell command; a path
            // containing quotes could break out of the quoting. Callers should not
            // pass untrusted paths here.
            try {
                if (yield ioUtil.isDirectory(inputPath, true)) {
                    yield exec(`rd /s /q "${inputPath}"`);
                }
                else {
                    yield exec(`del /f /a "${inputPath}"`);
                }
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
            try {
                yield ioUtil.unlink(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
        }
        else {
            let isDir = false;
            try {
                isDir = yield ioUtil.isDirectory(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
                return;
            }
            if (isDir) {
                yield exec(`rm -rf "${inputPath}"`);
            }
            else {
                yield ioUtil.unlink(inputPath);
            }
        }
    });
}
exports.rmRF = rmRF;
/**
 * Make a directory.  Creates the full path with folders in between
 * Will throw if it fails
 *
 * @param fsPath path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // delegates recursive creation to the io-util layer
        yield ioUtil.mkdirP(fsPath);
    });
}
exports.mkdirP = mkdirP;
/**
 * Returns path of a tool had the tool actually been invoked.  Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param tool   name of the tool
 * @param check  whether to check if tool exists
 * @returns      Promise<string> path to tool
 */
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
        }
        try {
            // build the list of extensions to try
            const extensions = [];
            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
                    if (extension) {
                        extensions.push(extension);
                    }
                }
            }
            // if it's rooted, return it if exists. otherwise return empty.
            if (ioUtil.isRooted(tool)) {
                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
                if (filePath) {
                    return filePath;
                }
                return '';
            }
            // if any path separators, return empty
            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
                return '';
            }
            // build the list of directories
            //
            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
            // it feels like we should not do this. Checking the current directory seems like more of a use
            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
            // across platforms.
            const directories = [];
            if (process.env.PATH) {
                for (const p of process.env.PATH.split(path.delimiter)) {
                    if (p) {
                        directories.push(p);
                    }
                }
            }
            // return the first match
            for (const directory of directories) {
                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
                if (filePath) {
                    return filePath;
                }
            }
            return '';
        }
        catch (err) {
            throw new Error(`which failed with message ${err.message}`);
        }
    });
}
exports.which = which;
// Normalize CopyOptions: `force` defaults to true (only null/undefined fall
// back to the default), `recursive` is coerced to a strict boolean.
function readCopyOptions(options) {
    let force = true;
    if (options.force != null) {
        force = options.force;
    }
    const recursive = Boolean(options.recursive);
    return { force, recursive };
}
// Recursively copy sourceDir into destDir, preserving the source directory's
// mode.  Recursion is capped at depth 255 to guard against runaway/cyclic copies.
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
    return __awaiter(this, void 0, void 0, function* () {
        // Ensure there is not a run away recursive copy
        if (currentDepth >= 255)
            return;
        currentDepth++;
        yield mkdirP(destDir);
        const files = yield ioUtil.readdir(sourceDir);
        for (const fileName of files) {
            const srcFile = `${sourceDir}/${fileName}`;
            const destFile = `${destDir}/${fileName}`;
            const srcFileStat = yield ioUtil.lstat(srcFile);
            if (srcFileStat.isDirectory()) {
                // Recurse
                yield cpDirRecursive(srcFile, destFile, currentDepth, force);
            }
            else {
                yield copyFile(srcFile, destFile, force);
            }
        }
        // Change the mode for the newly created directory
        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
    });
}
// Buffered file copy
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink
            const symlinkFull = yield ioUtil.readlink(srcFile);
            // Windows cannot symlink to a directory without elevation; junctions work unprivileged
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 438:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript-generated import helpers: build a namespace object for CJS modules.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__webpack_require__(53));
const utils_1 = __webpack_require__(30);
// Shared workflow context instance for the current Actions run.
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token    the repo PAT or GITHUB_TOKEN
 * @param options  other options to set
 */
function getOctokit(token, options) {
    return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map

/***/ }),
/***/ 440:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

// unwrap a transpiled ES-module default export when present
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var isPlainObject = _interopDefault(__webpack_require__(38));
var universalUserAgent = __webpack_require__(429);
// Return a copy of `object` with every key lower-cased.
// A falsy input yields an empty object.
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }
  const result = {};
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
// Recursively merge `options` into a shallow copy of `defaults`.
// Plain-object values whose key already exists in defaults are merged
// deeply; every other value overwrites.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options)) {
    const value = options[key];
    if (isPlainObject(value) && key in defaults) {
      result[key] = mergeDeep(defaults[key], value);
    } else {
      Object.assign(result, { [key]: value });
    }
  }
  return result;
}
function merge ( defaults , route , options ) {
if ( typeof route === "string" ) {
let [ method , url ] = route . split ( " " ) ;
options = Object . assign ( url ? {
method ,
url
} : {
url : method
} , options ) ;
} else {
options = Object . assign ( { } , route ) ;
} // lowercase header names before merging with defaults to avoid duplicates
options . headers = lowercaseKeys ( options . headers ) ;
const mergedOptions = mergeDeep ( defaults || { } , options ) ; // mediaType.previews arrays are merged, instead of overwritten
if ( defaults && defaults . mediaType . previews . length ) {
mergedOptions . mediaType . previews = defaults . mediaType . previews . filter ( preview => ! mergedOptions . mediaType . previews . includes ( preview ) ) . concat ( mergedOptions . mediaType . previews ) ;
}
mergedOptions . mediaType . previews = mergedOptions . mediaType . previews . map ( preview => preview . replace ( /-preview/ , "" ) ) ;
return mergedOptions ;
}
// Append `parameters` to `url` as query-string pairs.  The "q" parameter is
// special-cased: its "+"-separated search terms are encoded individually so
// the "+" separators survive encoding.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  const separator = /\?/.test(url) ? "&" : "?";
  const pairs = names.map(name => {
    if (name === "q") {
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }
    return `${name}=${encodeURIComponent(parameters[name])}`;
  });
  return url + separator + pairs.join("&");
}
const urlVariableRegex = /\{[^}]+\}/g;

// Strip the surrounding braces/operator characters from a matched {…}
// expression and split it into its comma-separated variable names.
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

// List every variable name referenced by {…} expressions in the URL template.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }
  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
// Shallow-copy `object` without the keys listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
// Percent-encode a string while leaving already-encoded sequences (%XX)
// and URI-reserved characters ([ and ]) intact.
function encodeReserved(str) {
  const parts = str.split(/(%[0-9A-Fa-f]{2})/g);
  return parts
    .map(part => (/%[0-9A-Fa-f]/.test(part)
      ? part
      : encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]")))
    .join("");
}
// RFC 3986 strict encoding: encodeURIComponent plus !, ', (, ), and *.
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, c =>
    "%" + c.charCodeAt(0).toString(16).toUpperCase());
}
// Encode a single value per the expression operator: "+" and "#" keep
// reserved characters, everything else is fully encoded.  When `key` is
// given, emit "key=value".
function encodeValue(operator, value, key) {
  const encoded = (operator === "+" || operator === "#")
    ? encodeReserved(value)
    : encodeUnreserved(value);
  return key ? encodeUnreserved(key) + "=" + encoded : encoded;
}

// True for any value other than null/undefined.
function isDefined(value) {
  return value !== undefined && value !== null;
}

// Operators whose expansions render as key=value pairs.
function isKeyOperator(operator) {
  return operator === ";" || operator === "&" || operator === "?";
}
function getValues ( context , operator , key , modifier ) {
var value = context [ key ] ,
result = [ ] ;
if ( isDefined ( value ) && value !== "" ) {
if ( typeof value === "string" || typeof value === "number" || typeof value === "boolean" ) {
value = value . toString ( ) ;
if ( modifier && modifier !== "*" ) {
value = value . substring ( 0 , parseInt ( modifier , 10 ) ) ;
}
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} else {
if ( modifier === "*" ) {
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
result . push ( encodeValue ( operator , value [ k ] , k ) ) ;
}
} ) ;
}
} else {
const tmp = [ ] ;
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
tmp . push ( encodeValue ( operator , value ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
tmp . push ( encodeUnreserved ( k ) ) ;
tmp . push ( encodeValue ( operator , value [ k ] . toString ( ) ) ) ;
}
} ) ;
}
if ( isKeyOperator ( operator ) ) {
result . push ( encodeUnreserved ( key ) + "=" + tmp . join ( "," ) ) ;
} else if ( tmp . length !== 0 ) {
result . push ( tmp . join ( "," ) ) ;
}
}
}
} else {
if ( operator === ";" ) {
if ( isDefined ( value ) ) {
result . push ( encodeUnreserved ( key ) ) ;
}
} else if ( value === "" && ( operator === "&" || operator === "?" ) ) {
result . push ( encodeUnreserved ( key ) + "=" ) ;
} else if ( value === "" ) {
result . push ( "" ) ;
}
}
return result ;
}
// Wrap a URL template in an object exposing `expand(context)`.
function parseUrl(template) {
  return {
    expand: (context) => expand(template, context)
  };
}
function expand ( template , context ) {
var operators = [ "+" , "#" , "." , "/" , ";" , "?" , "&" ] ;
return template . replace ( /\{([^\{\}]+)\}|([^\{\}]+)/g , function ( _ , expression , literal ) {
if ( expression ) {
let operator = "" ;
const values = [ ] ;
if ( operators . indexOf ( expression . charAt ( 0 ) ) !== - 1 ) {
operator = expression . charAt ( 0 ) ;
expression = expression . substr ( 1 ) ;
}
expression . split ( /,/g ) . forEach ( function ( variable ) {
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/ . exec ( variable ) ;
values . push ( getValues ( context , operator , tmp [ 1 ] , tmp [ 2 ] || tmp [ 3 ] ) ) ;
} ) ;
if ( operator && operator !== "+" ) {
var separator = "," ;
if ( operator === "?" ) {
separator = "&" ;
} else if ( operator !== "#" ) {
separator = operator ;
}
return ( values . length !== 0 ? operator : "" ) + values . join ( separator ) ;
} else {
return values . join ( "," ) ;
}
} else {
return encodeReserved ( literal ) ;
}
} ) ;
}
// Resolve a fully-merged endpoint options object into the final request
// shape: { method, url, headers, body?, request? }.
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);

  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);

  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }

    if (options.mediaType.previews.length) {
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters

  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set

  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string

  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present

  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
/**
 * Resolve a route against a set of endpoint defaults and return the
 * final request options.
 *
 * @param {object} defaults - Base endpoint defaults to merge into.
 * @param {string|object} route - Route string or options object.
 * @param {object} [options] - Additional request parameters.
 * @returns {object} Parsed request options (method, url, headers, …).
 */
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
/**
 * Create a new `endpoint` function pre-bound to merged defaults.
 * The returned function also exposes `DEFAULTS`, `defaults`, `merge`
 * and `parse` as properties, mirroring the public octokit endpoint API.
 *
 * @param {object|null} oldDefaults - Previous defaults (or null).
 * @param {object} newDefaults - Defaults to layer on top.
 * @returns {Function} Bound endpoint function with helper properties.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);

  const api = {
    DEFAULTS,
    defaults: withDefaults.bind(null, DEFAULTS),
    merge: merge.bind(null, DEFAULTS),
    parse
  };

  return Object.assign(endpoint, api);
}
const VERSION = "6.0.5" ;
const userAgent = ` octokit-endpoint.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } ` ; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
method : "GET" ,
baseUrl : "https://api.github.com" ,
headers : {
accept : "application/vnd.github.v3+json" ,
"user-agent" : userAgent
} ,
mediaType : {
format : "" ,
previews : [ ]
}
} ;
const endpoint = withDefaults ( null , DEFAULTS ) ;
exports . endpoint = endpoint ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 443 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
/**
 * Determine the proxy URL (if any) to use for the given request URL.
 * Honors https_proxy/HTTPS_PROXY for https requests and
 * http_proxy/HTTP_PROXY for everything else; returns undefined when the
 * host is excluded via NO_PROXY (see checkBypass) or no proxy is set.
 *
 * @param {object} reqUrl - Parsed request URL (needs .protocol).
 * @returns {object|undefined} Legacy-parsed proxy URL or undefined.
 */
function getProxyUrl(reqUrl) {
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    return proxyVar ? url.parse(proxyVar) : undefined;
}
exports . getProxyUrl = getProxyUrl ;
/**
 * Decide whether the proxy should be bypassed for the given request URL,
 * based on the no_proxy/NO_PROXY environment variable (comma-separated,
 * case-insensitive, exact host or host:port matches only).
 *
 * @param {object} reqUrl - Parsed request URL (.hostname, .port, .protocol).
 * @returns {boolean} true when the request should skip the proxy.
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective request port: explicit port wins, otherwise
    // fall back to the protocol default (80/443).
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate spellings: bare upper-cased hostname, plus hostname:port
    // when a port could be determined.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Bypass when any NO_PROXY entry exactly matches a candidate.
    const entries = noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry);
    return entries.some(entry => upperReqHosts.some(host => host === entry));
}
exports . checkBypass = checkBypass ;
/***/ } ) ,
/***/ 447 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
// Return the minor version component of `a`, parsed according to `loose` options.
const minor = (a, loose) => {
  const parsed = new SemVer(a, loose);
  return parsed.minor;
};
module . exports = minor
/***/ } ) ,
/***/ 463 :
/***/ ( function ( module ) {
const numeric = /^[0-9]+$/;

/**
 * Compare two semver pre-release identifiers per the SemVer spec:
 * identifiers consisting only of digits compare numerically and always
 * sort before alphanumeric identifiers; otherwise compare lexically.
 * Returns -1, 0 or 1.
 */
const compareIdentifiers = (a, b) => {
  const aIsNum = numeric.test(a);
  const bIsNum = numeric.test(b);

  // Only coerce when BOTH sides are numeric, so '1' vs 'alpha' stays lexical.
  const left = aIsNum && bIsNum ? +a : a;
  const right = aIsNum && bIsNum ? +b : b;

  if (left === right) {
    return 0;
  }
  if (aIsNum && !bIsNum) {
    return -1;
  }
  if (bIsNum && !aIsNum) {
    return 1;
  }
  return left < right ? -1 : 1;
};

// Reverse comparator, for descending sorts.
const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a);
module . exports = {
compareIdentifiers ,
rcompareIdentifiers
}
/***/ } ) ,
/***/ 466 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const parse = _ _webpack _require _ _ ( 925 )
const { re , t } = _ _webpack _require _ _ ( 523 )
const coerce = ( version , options ) => {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version === 'number' ) {
version = String ( version )
}
if ( typeof version !== 'string' ) {
return null
}
options = options || { }
let match = null
if ( ! options . rtl ) {
match = version . match ( re [ t . COERCE ] )
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
let next
while ( ( next = re [ t . COERCERTL ] . exec ( version ) ) &&
( ! match || match . index + match [ 0 ] . length !== version . length )
) {
if ( ! match ||
next . index + next [ 0 ] . length !== match . index + match [ 0 ] . length ) {
match = next
}
re [ t . COERCERTL ] . lastIndex = next . index + next [ 1 ] . length + next [ 2 ] . length
}
// leave it in a clean state
re [ t . COERCERTL ] . lastIndex = - 1
}
if ( match === null )
return null
return parse ( ` ${ match [ 2 ] } . ${ match [ 3 ] || '0' } . ${ match [ 4 ] || '0' } ` , options )
}
module . exports = coerce
/***/ } ) ,
/***/ 467 :
/***/ ( function ( module , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var Stream = _interopDefault ( _ _webpack _require _ _ ( 413 ) ) ;
var http = _interopDefault ( _ _webpack _require _ _ ( 605 ) ) ;
var Url = _interopDefault ( _ _webpack _require _ _ ( 835 ) ) ;
var https = _interopDefault ( _ _webpack _require _ _ ( 211 ) ) ;
var zlib = _interopDefault ( _ _webpack _require _ _ ( 761 ) ) ;
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js

// fix for "Readable" isn't a named export issue
const Readable = Stream.Readable;

const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');

/**
 * Minimal W3C Blob implementation backed by a single Node Buffer.
 * Accepts the same (blobParts, options) arguments as the DOM Blob
 * constructor; the MIME type is kept only when it is printable ASCII.
 */
class Blob {
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];

    if (blobParts) {
      const parts = blobParts;
      // blobParts may be array-like, so iterate by index.
      const count = Number(parts.length);
      for (let index = 0; index < count; index++) {
        const part = parts[index];
        let chunk;
        if (part instanceof Buffer) {
          chunk = part;
        } else if (ArrayBuffer.isView(part)) {
          chunk = Buffer.from(part.buffer, part.byteOffset, part.byteLength);
        } else if (part instanceof ArrayBuffer) {
          chunk = Buffer.from(part);
        } else if (part instanceof Blob) {
          chunk = part[BUFFER];
        } else {
          // anything else is coerced to string first
          chunk = Buffer.from(typeof part === 'string' ? part : String(part));
        }
        buffers.push(chunk);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    // only keep the type when it is non-empty, printable ASCII
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  get size() {
    return this[BUFFER].length;
  }
  get type() {
    return this[TYPE];
  }
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  arrayBuffer() {
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  slice() {
    const size = this.size;

    const start = arguments[0];
    const end = arguments[1];

    // Clamp an index to [0, size], resolving negative indices from the end
    // and substituting `fallback` when the argument was omitted.
    const clamp = (value, fallback) => {
      if (value === undefined) {
        return fallback;
      }
      return value < 0 ? Math.max(size + value, 0) : Math.min(value, size);
    };

    const relativeStart = clamp(start, 0);
    const relativeEnd = clamp(end, size);
    const span = Math.max(relativeEnd - relativeStart, 0);

    const slicedBuffer = this[BUFFER].slice(relativeStart, relativeStart + span);
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}
Object . defineProperties ( Blob . prototype , {
size : { enumerable : true } ,
type : { enumerable : true } ,
slice : { enumerable : true }
} ) ;
Object . defineProperty ( Blob . prototype , Symbol . toStringTag , {
value : 'Blob' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/**
 * fetch-error.js
 *
 * FetchError interface for operational errors
 */

/**
 * Create FetchError instance
 *
 * @param   String  message      Error message for human
 * @param   String  type         Error type for machine
 * @param   Object  systemError  Underlying Node.js system error, if any
 * @return  FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // when err.type is `system`, err.code contains the system error code
  if (systemError) {
    const code = systemError.code;
    this.code = code;
    this.errno = code;
  }

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
// Optional dependency: the `encoding` package (module 877) provides
// iconv-based charset conversion used by textConverted(). Swallow the
// error when it is not installed — convertBody() throws a descriptive
// error in that case.
// NOTE: stray VCS timestamp lines had corrupted this try/catch; removed.
let convert;
try {
  convert = __webpack_require__(877).convert;
} catch (e) {}
// Private per-instance state for Body (raw body, consumed flag, pending error).
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;

/**
 * Body mixin
 *
 * Ref: https://fetch.spec.whatwg.org/#body
 *
 * @param   Stream  body  Readable stream (or Buffer/string/Blob/ArrayBuffer/…)
 * @param   Object  opts  Response options ({ size, timeout })
 * @return  Void
 */
function Body(body) {
  var _this = this;

  // Manual destructuring of opts (compiled output): size/timeout default to 0.
  var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      _ref$size = _ref.size;

  let size = _ref$size === undefined ? 0 : _ref$size;
  var _ref$timeout = _ref.timeout;
  let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

  // Normalize the many accepted body types down to null | Buffer | Blob | Stream.
  if (body == null) {
    // body is undefined or null
    body = null;
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    body = Buffer.from(body.toString());
  } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer (Blob/Buffer branches above intentionally keep body as-is)
    body = Buffer.from(body);
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  } else if (body instanceof Stream) ; else {
    // none of the above
    // coerce to string then buffer
    body = Buffer.from(String(body));
  }
  this[INTERNALS] = {
    body,
    disturbed: false,
    error: null
  };
  this.size = size;
  this.timeout = timeout;

  if (body instanceof Stream) {
    // Capture stream errors now; consumeBody() rejects with this error later.
    body.on('error', function (err) {
      const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
      _this[INTERNALS].error = error;
    });
  }
}
// Shared accessors/consumers mixed into Request and Response via Body.mixIn().
Body.prototype = {
  // Raw normalized body (null | Buffer | Blob | Stream).
  get body() {
    return this[INTERNALS].body;
  },

  // True once the body has been consumed (spec: "disturbed").
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return  Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying: install the consumed buffer directly on a fresh Blob.
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return  Promise
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return  Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return  Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return  Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
// In browsers, all properties are enumerable.
Object . defineProperties ( Body . prototype , {
body : { enumerable : true } ,
bodyUsed : { enumerable : true } ,
arrayBuffer : { enumerable : true } ,
blob : { enumerable : true } ,
json : { enumerable : true } ,
text : { enumerable : true }
} ) ;
Body . mixIn = function ( proto ) {
for ( const name of Object . getOwnPropertyNames ( Body . prototype ) ) {
// istanbul ignore else: future proof
if ( ! ( name in proto ) ) {
const desc = Object . getOwnPropertyDescriptor ( Body . prototype , name ) ;
Object . defineProperty ( proto , name , desc ) ;
}
}
} ;
/**
 * Consume and convert an entire Body to a Buffer.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * Rejects when the body was already consumed, when a stream error was
 * previously captured, on timeout, or when the accumulated size exceeds
 * this.size.
 *
 * @return  Promise
 */
function consumeBody() {
  var _this4 = this;

  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }

  this[INTERNALS].disturbed = true;

  // surface any stream error captured by the Body constructor
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }

  let body = this.body;

  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is blob — consume it as a stream below
  if (isBlob(body)) {
    body = body.stream();
  }

  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }

  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;

  return new Body.Promise(function (resolve, reject) {
    let resTimeout;

    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }

    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });

    body.on('data', function (chunk) {
      // `abort` guards every handler so late events are ignored after rejection
      if (abort || chunk === null) {
        return;
      }

      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }

      accumBytes += chunk.length;
      accum.push(chunk);
    });

    body.on('end', function () {
      if (abort) {
        return;
      }

      clearTimeout(resTimeout);

      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
/**
 * Detect buffer encoding and convert to target encoding
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * Charset detection order: content-type header, html5 <meta charset>,
 * html4 <meta http-equiv> (either attribute order), XML declaration;
 * falls back to utf-8. Requires the optional `encoding` package.
 *
 * NOTE: stray VCS timestamp lines had corrupted the html4 branch of this
 * function; removed so the statement parses again.
 *
 * @param   Buffer  buffer   Incoming buffer
 * @param   Object  headers  Headers of the response (read for content-type)
 * @return  String
 */
function convertBody(buffer, headers) {
  if (typeof convert !== 'function') {
    throw new Error('The package `encoding` must be installed to use the textConverted() function');
  }

  const ct = headers.get('content-type');
  let charset = 'utf-8';
  let res, str;

  // header
  if (ct) {
    res = /charset=([^;]*)/i.exec(ct);
  }

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString();

  // html5
  if (!res && str) {
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
  }

  // html4
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
    if (!res) {
      // attributes in the opposite order
      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
      if (res) {
        res.pop(); // drop last quote
      }
    }

    if (res) {
      res = /charset=(.*)/i.exec(res.pop());
    }
  }

  // xml
  if (!res && str) {
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
  }

  // found charset
  if (res) {
    charset = res.pop();

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk') {
      charset = 'gb18030';
    }
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(buffer, 'UTF-8', charset).toString();
}
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param   Object  obj  Object to detect by type or brand
 * @return  Boolean
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition.
  const requiredMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
  if (typeof obj !== 'object' || requiredMethods.some(m => typeof obj[m] !== 'function')) {
    return false;
  }

  // Brand-checking and more duck-typing as optional condition.
  return (
    obj.constructor.name === 'URLSearchParams' ||
    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
    typeof obj.sort === 'function'
  );
}
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param  {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  if (typeof obj !== 'object') {
    return false;
  }
  // duck-typed surface every Blob exposes
  const hasBlobShape =
    typeof obj.arrayBuffer === 'function' &&
    typeof obj.type === 'string' &&
    typeof obj.stream === 'function';
  if (!hasBlobShape) {
    return false;
  }
  // brand checks: constructor name and Symbol.toStringTag must read Blob/File
  const ctor = obj.constructor;
  return (
    typeof ctor === 'function' &&
    typeof ctor.name === 'string' &&
    /^(Blob|File)$/.test(ctor.name) &&
    /^(Blob|File)$/.test(obj[Symbol.toStringTag])
  );
}
/**
 * Clone body given Res/Req instance
 *
 * @param   Mixed   instance  Response or Request instance
 * @return  Mixed
 */
function clone(instance) {
  let p1, p2;
  let body = instance.body;

  // don't allow cloning a used body
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // check that body is a stream and not form-data object
  // note: we can't clone the form-data object without having it as a dependency
  // (form-data instances are streams but expose getBoundary())
  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
    // tee instance body: each PassThrough receives a full copy of the stream
    p1 = new PassThrough();
    p2 = new PassThrough();
    body.pipe(p1);
    body.pipe(p2);
    // set instance body to teed body and return the other teed body
    instance[INTERNALS].body = p1;
    body = p2;
  }

  return body;
}
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * This function assumes that instance.body is present.
 *
 * @param   Mixed  body  Any options.body input
 * @return  String|null
 */
function extractContentType(body) {
  if (body === null) {
    // no body, no type
    return null;
  }
  if (typeof body === 'string') {
    return 'text/plain;charset=UTF-8';
  }
  if (isURLSearchParams(body)) {
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  }
  if (isBlob(body)) {
    // empty Blob type falls through to null
    return body.type || null;
  }
  if (Buffer.isBuffer(body)) {
    return null;
  }
  if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    return null;
  }
  if (ArrayBuffer.isView(body)) {
    return null;
  }
  if (typeof body.getBoundary === 'function') {
    // detect form data input from form-data module
    return `multipart/form-data;boundary=${body.getBoundary()}`;
  }
  if (body instanceof Stream) {
    // can't determine a type for an arbitrary stream
    return null;
  }
  // Body constructor defaults other things to string
  return 'text/plain;charset=UTF-8';
}
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param   Body    instance  Instance of Body
 * @return  Number?           Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
  const body = instance.body;

  if (body === null) {
    return 0;
  }
  if (isBlob(body)) {
    return body.size;
  }
  if (Buffer.isBuffer(body)) {
    return body.length;
  }
  if (body && typeof body.getLengthSync === 'function') {
    // detect form data input from form-data module:
    // 1.x exposes _lengthRetrievers, 2.x exposes hasKnownLength()
    const lengthKnown =
      (body._lengthRetrievers && body._lengthRetrievers.length == 0) ||
      (body.hasKnownLength && body.hasKnownLength());
    return lengthKnown ? body.getLengthSync() : null;
  }
  // body is a stream of unknown length
  return null;
}
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param   Writable  dest      Destination stream
 * @param   Body      instance  Instance of Body
 * @return  Void
 */
function writeToStream(dest, instance) {
  const body = instance.body;

  if (body === null) {
    // nothing to send
    dest.end();
    return;
  }
  if (isBlob(body)) {
    body.stream().pipe(dest);
    return;
  }
  if (Buffer.isBuffer(body)) {
    dest.write(body);
    dest.end();
    return;
  }
  // otherwise body is a stream
  body.pipe(dest);
}
// expose Promise
Body . Promise = global . Promise ;
/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

// characters NOT allowed in an HTTP header token (name)
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// characters NOT allowed in an HTTP header value
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

/**
 * Throw a TypeError unless `name` is a non-empty legal HTTP header token.
 */
function validateName(name) {
  const token = `${name}`;
  if (token === '' || invalidTokenRegex.test(token)) {
    throw new TypeError(`${token} is not a legal HTTP header name`);
  }
}

/**
 * Throw a TypeError unless `value` contains only legal header characters.
 */
function validateValue(value) {
  const headerValue = `${value}`;
  if (invalidHeaderCharRegex.test(headerValue)) {
    throw new TypeError(`${headerValue} is not a legal HTTP header value`);
  }
}
/**
 * Find the key in the map object given a header name.
 *
 * Returns undefined if not found.
 *
 * @param   String  name  Header name
 * @return  String|Undefined
 */
function find(map, name) {
  const target = name.toLowerCase();
  // case-insensitive scan; for...in matches the original lookup semantics
  for (const key in map) {
    if (key.toLowerCase() === target) {
      return key;
    }
  }
  return undefined;
}
// Backing store symbol: each Headers keeps a null-prototype object of
// original-cased name -> array of values.
const MAP = Symbol('map');
class Headers {
  /**
   * Headers class
   *
   * Accepts another Headers instance, an iterable of [name, value] pairs,
   * or a plain record of name -> value.
   *
   * @param   Object  headers  Response headers
   * @return  Void
   */
  constructor() {
    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
    this[MAP] = Object.create(null);

    if (init instanceof Headers) {
      const rawHeaders = init.raw();
      const headerNames = Object.keys(rawHeaders);

      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }

      return;
    }

    // We don't worry about converting prop to ByteString here as append()
    // will handle it.
    if (init == null) ; else if (typeof init === 'object') {
      const method = init[Symbol.iterator];
      if (method != null) {
        if (typeof method !== 'function') {
          throw new TypeError('Header pairs must be iterable');
        }

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = [];
        for (const pair of init) {
          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
            throw new TypeError('Each header pair must be iterable');
          }
          pairs.push(Array.from(pair));
        }

        for (const pair of pairs) {
          if (pair.length !== 2) {
            throw new TypeError('Each header pair must be a name/value tuple');
          }
          this.append(pair[0], pair[1]);
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          const value = init[key];
          this.append(key, value);
        }
      }
    } else {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  /**
   * Return combined header value given name, or null when absent.
   *
   * @param   String  name  Header name
   * @return  Mixed
   */
  get(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key === undefined) {
      return null;
    }

    // multiple values are joined per the spec
    return this[MAP][key].join(', ');
  }

  /**
   * Iterate over all headers
   *
   * @param   Function  callback  Executed for each item with parameters (value, name, thisArg)
   * @param   Boolean   thisArg   `this` context for callback function
   * @return  Void
   */
  forEach(callback) {
    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      var _pairs$i = pairs[i];
      const name = _pairs$i[0],
            value = _pairs$i[1];

      callback.call(thisArg, value, name, this);
      // re-read in case the callback mutated this Headers instance
      pairs = getHeaders(this);
      i++;
    }
  }

  /**
   * Overwrite header values given name
   *
   * @param   String  name   Header name
   * @param   String  value  Header value
   * @return  Void
   */
  set(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    // reuse the existing original-cased key when one exists
    this[MAP][key !== undefined ? key : name] = [value];
  }

  /**
   * Append a value onto existing header
   *
   * @param   String  name   Header name
   * @param   String  value  Header value
   * @return  Void
   */
  append(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  /**
   * Check for header name existence
   *
   * @param   String   name  Header name
   * @return  Boolean
   */
  has(name) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  /**
   * Delete all header values given name
   *
   * @param   String  name  Header name
   * @return  Void
   */
  delete(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  /**
   * Return raw headers (non-spec api)
   *
   * @return  Object
   */
  raw() {
    return this[MAP];
  }

  /**
   * Get an iterator on keys.
   *
   * @return  Iterator
   */
  keys() {
    return createHeadersIterator(this, 'key');
  }

  /**
   * Get an iterator on values.
   *
   * @return  Iterator
   */
  values() {
    return createHeadersIterator(this, 'value');
  }

  /**
   * Get an iterator on entries.
   *
   * This is the default iterator of the Headers object.
   *
   * @return  Iterator
   */
  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
Headers . prototype . entries = Headers . prototype [ Symbol . iterator ] ;
Object . defineProperty ( Headers . prototype , Symbol . toStringTag , {
value : 'Headers' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Headers . prototype , {
get : { enumerable : true } ,
forEach : { enumerable : true } ,
set : { enumerable : true } ,
append : { enumerable : true } ,
has : { enumerable : true } ,
delete : { enumerable : true } ,
keys : { enumerable : true } ,
values : { enumerable : true } ,
entries : { enumerable : true }
} ) ;
/**
 * Snapshot the headers of `headers` as a sorted array, projected per `kind`:
 * 'key' -> lowercased names, 'value' -> joined values,
 * 'key+value' (default) -> [lowercased name, joined value] pairs.
 */
function getHeaders(headers, kind = 'key+value') {
  const map = headers[MAP];
  let project;
  if (kind === 'key') {
    project = k => k.toLowerCase();
  } else if (kind === 'value') {
    project = k => map[k].join(', ');
  } else {
    project = k => [k.toLowerCase(), map[k].join(', ')];
  }
  return Object.keys(map).sort().map(project);
}
// Private state slot for header iterators.
const INTERNAL = Symbol('internal');

/**
 * Build a HeadersIterator over `target` yielding items of the given `kind`
 * ('key' | 'value' | 'key+value'), starting at index 0.
 */
function createHeadersIterator(target, kind) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = { target, kind, index: 0 };
  return iterator;
}
const HeadersIteratorPrototype = Object . setPrototypeOf ( {
next ( ) {
// istanbul ignore if
if ( ! this || Object . getPrototypeOf ( this ) !== HeadersIteratorPrototype ) {
throw new TypeError ( 'Value of `this` is not a HeadersIterator' ) ;
}
var _INTERNAL = this [ INTERNAL ] ;
const target = _INTERNAL . target ,
kind = _INTERNAL . kind ,
index = _INTERNAL . index ;
const values = getHeaders ( target , kind ) ;
const len = values . length ;
if ( index >= len ) {
return {
value : undefined ,
done : true
} ;
}
this [ INTERNAL ] . index = index + 1 ;
return {
value : values [ index ] ,
done : false
} ;
}
} , Object . getPrototypeOf ( Object . getPrototypeOf ( [ ] [ Symbol . iterator ] ( ) ) ) ) ;
Object . defineProperty ( HeadersIteratorPrototype , Symbol . toStringTag , {
value : 'HeadersIterator' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param   Headers  headers
 * @return  Object
 */
function exportNodeCompatibleHeaders(headers) {
  const raw = headers[MAP];
  const obj = Object.assign({ __proto__: null }, raw);

  // http.request() only supports string as Host header. This hack makes
  // specifying custom Host header possible.
  const hostHeaderKey = find(raw, 'Host');
  if (hostHeaderKey !== undefined) {
    obj[hostHeaderKey] = obj[hostHeaderKey][0];
  }

  return obj;
}
/**
 * Create a Headers object from an object of headers, ignoring those that do
 * not conform to HTTP grammar productions.
 *
 * @param   Object  obj  Object of headers
 * @return  Headers
 */
function createHeadersLenient(obj) {
  const headers = new Headers();

  for (const name of Object.keys(obj)) {
    // silently drop names that are not legal header tokens
    if (invalidTokenRegex.test(name)) {
      continue;
    }

    const raw = obj[name];
    if (Array.isArray(raw)) {
      for (const val of raw) {
        // silently drop values with illegal characters
        if (invalidHeaderCharRegex.test(val)) {
          continue;
        }

        // write straight into the backing map to bypass strict validation
        if (headers[MAP][name] === undefined) {
          headers[MAP][name] = [val];
        } else {
          headers[MAP][name].push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(raw)) {
      headers[MAP][name] = [raw];
    }
  }

  return headers;
}
// Symbol key for per-instance Response state (kept off the public surface).
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
class Response {
	constructor(body = null, opts = {}) {
		Body.call(this, body, opts);

		const status = opts.status || 200;
		const headers = new Headers(opts.headers);

		// infer a Content-Type from the body when the caller did not set one
		if (body != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(body);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		this[INTERNALS$1] = {
			url: opts.url,
			status,
			statusText: opts.statusText || STATUS_CODES[status],
			headers,
			counter: opts.counter
		};
	}

	get url() {
		return this[INTERNALS$1].url || '';
	}

	get status() {
		return this[INTERNALS$1].status;
	}

	/**
	 * Convenience property representing if the request ended normally
	 */
	get ok() {
		const status = this[INTERNALS$1].status;
		return status >= 200 && status < 300;
	}

	get redirected() {
		return this[INTERNALS$1].counter > 0;
	}

	get statusText() {
		return this[INTERNALS$1].statusText;
	}

	get headers() {
		return this[INTERNALS$1].headers;
	}

	/**
	 * Clone this response
	 *
	 * @return Response
	 */
	clone() {
		return new Response(clone(this), {
			url: this.url,
			status: this.status,
			statusText: this.statusText,
			headers: this.headers,
			ok: this.ok,
			redirected: this.redirected
		});
	}
}
Body . mixIn ( Response . prototype ) ;
Object . defineProperties ( Response . prototype , {
url : { enumerable : true } ,
status : { enumerable : true } ,
ok : { enumerable : true } ,
redirected : { enumerable : true } ,
statusText : { enumerable : true } ,
headers : { enumerable : true } ,
clone : { enumerable : true }
} ) ;
Object . defineProperty ( Response . prototype , Symbol . toStringTag , {
value : 'Response' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
// Symbol key for per-instance Request state (kept off the public surface).
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// Readable#destroy is needed to cancel a streamed request body on abort
// (error message below says it is missing on node < 8).
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
/**
 * Check if a value is an instance of Request.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
	if (typeof input !== 'object') {
		return false;
	}
	// a Request carries its internal state under the INTERNALS$2 symbol
	return typeof input[INTERNALS$2] === 'object';
}
/**
 * Duck-type check for AbortSignal: accepts any object whose direct prototype
 * was built by a constructor named "AbortSignal" (works across realms and
 * polyfills where instanceof would fail).
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
	if (!signal || typeof signal !== 'object') {
		return false;
	}
	const proto = Object.getPrototypeOf(signal);
	return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
	constructor(input, init = {}) {
		let parsedURL;

		// normalize input
		if (!isRequest(input)) {
			if (input && input.href) {
				// in order to support Node.js' Url objects; though WHATWG's URL objects
				// will fall into this branch also (since their `toString()` will return
				// `href` property anyway)
				parsedURL = parse_url(input.href);
			} else {
				// coerce input to a string before attempting to parse
				parsedURL = parse_url(`${input}`);
			}
			input = {};
		} else {
			parsedURL = parse_url(input.url);
		}

		const method = (init.method || input.method || 'GET').toUpperCase();

		if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
			throw new TypeError('Request with GET/HEAD method cannot have body');
		}

		const inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

		Body.call(this, inputBody, {
			timeout: init.timeout || input.timeout || 0,
			size: init.size || input.size || 0
		});

		const headers = new Headers(init.headers || input.headers || {});

		// infer a Content-Type from the body when none was supplied
		if (inputBody != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(inputBody);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		let signal = isRequest(input) ? input.signal : null;
		if ('signal' in init) signal = init.signal;

		if (signal != null && !isAbortSignal(signal)) {
			throw new TypeError('Expected signal to be an instanceof AbortSignal');
		}

		this[INTERNALS$2] = {
			method,
			redirect: init.redirect || input.redirect || 'follow',
			headers,
			parsedURL,
			signal
		};

		// node-fetch-only options
		this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
		this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
		this.counter = init.counter || input.counter || 0;
		this.agent = init.agent || input.agent;
	}

	get method() {
		return this[INTERNALS$2].method;
	}

	get url() {
		return format_url(this[INTERNALS$2].parsedURL);
	}

	get headers() {
		return this[INTERNALS$2].headers;
	}

	get redirect() {
		return this[INTERNALS$2].redirect;
	}

	get signal() {
		return this[INTERNALS$2].signal;
	}

	/**
	 * Clone this request
	 *
	 * @return Request
	 */
	clone() {
		return new Request(this);
	}
}
Body . mixIn ( Request . prototype ) ;
Object . defineProperty ( Request . prototype , Symbol . toStringTag , {
value : 'Request' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Request . prototype , {
method : { enumerable : true } ,
url : { enumerable : true } ,
headers : { enumerable : true } ,
redirect : { enumerable : true } ,
clone : { enumerable : true } ,
signal : { enumerable : true }
} ) ;
/**
 * Convert a Request to Node.js http request options.
 *
 * @param Request A Request instance
 * @return Object The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
	const parsedURL = request[INTERNALS$2].parsedURL;
	// copy: the request's own headers must not be mutated below
	const headers = new Headers(request[INTERNALS$2].headers);

	// fetch step 1.3
	if (!headers.has('Accept')) {
		headers.set('Accept', '*/*');
	}

	// Basic fetch
	if (!parsedURL.protocol || !parsedURL.hostname) {
		throw new TypeError('Only absolute URLs are supported');
	}
	if (!/^https?:$/.test(parsedURL.protocol)) {
		throw new TypeError('Only HTTP(S) protocols are supported');
	}
	if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
		throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
	}

	// HTTP-network-or-cache fetch steps 2.4-2.7
	let contentLength = null;
	if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
		// bodyless POST/PUT still advertise a zero-length body
		contentLength = '0';
	}
	if (request.body != null) {
		const bodySize = getTotalBytes(request);
		if (typeof bodySize === 'number') {
			contentLength = String(bodySize);
		}
	}
	if (contentLength) {
		headers.set('Content-Length', contentLength);
	}

	// HTTP-network-or-cache fetch step 2.11
	if (!headers.has('User-Agent')) {
		headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
	}

	// HTTP-network-or-cache fetch step 2.15
	if (request.compress && !headers.has('Accept-Encoding')) {
		headers.set('Accept-Encoding', 'gzip,deflate');
	}

	let agent = request.agent;
	if (typeof agent === 'function') {
		// per-URL agent selection
		agent = agent(parsedURL);
	}

	if (!headers.has('Connection') && !agent) {
		headers.set('Connection', 'close');
	}

	// HTTP-network fetch step 4.2
	// chunked encoding is handled by Node.js
	return Object.assign({}, parsedURL, {
		method: request.method,
		headers: exportNodeCompatibleHeaders(headers),
		agent
	});
}
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param String message Error message for human
 * @return AbortError
 */
function AbortError(message) {
	Error.call(this, message);

	// Error.call does not copy the message onto `this`; assign explicitly
	this.message = message;
	this.type = 'aborted';

	// hide custom error implementation details from end-users
	Error.captureStackTrace(this, this.constructor);
}

AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
/**
 * Fetch function
 *
 * Wraps http(s).request into a WHATWG-fetch-style promise, handling aborts,
 * timeouts, redirects and content decoding.
 *
 * NOTE(review): stray VCS timestamp lines that had been spliced into this
 * function body (syntax errors) were removed; the logic is otherwise unchanged.
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise resolves to a Response, rejects with FetchError/AbortError
 */
function fetch(url, opts) {
	// allow custom promise
	if (!fetch.Promise) {
		throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
	}

	Body.Promise = fetch.Promise;

	// wrap http.request into fetch
	return new fetch.Promise(function (resolve, reject) {
		// build request object
		const request = new Request(url, opts);
		const options = getNodeRequestOptions(request);

		const send = (options.protocol === 'https:' ? https : http).request;
		const signal = request.signal;

		let response = null;

		// reject with AbortError and tear down request/response streams
		const abort = function abort() {
			let error = new AbortError('The user aborted a request.');
			reject(error);
			if (request.body && request.body instanceof Stream.Readable) {
				request.body.destroy(error);
			}
			if (!response || !response.body) return;
			response.body.emit('error', error);
		};

		if (signal && signal.aborted) {
			abort();
			return;
		}

		const abortAndFinalize = function abortAndFinalize() {
			abort();
			finalize();
		};

		// send request
		const req = send(options);
		let reqTimeout;

		if (signal) {
			signal.addEventListener('abort', abortAndFinalize);
		}

		// abort the in-flight request and release listeners/timers
		function finalize() {
			req.abort();
			if (signal) signal.removeEventListener('abort', abortAndFinalize);
			clearTimeout(reqTimeout);
		}

		if (request.timeout) {
			// start the timeout only once a socket is assigned
			req.once('socket', function (socket) {
				reqTimeout = setTimeout(function () {
					reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
					finalize();
				}, request.timeout);
			});
		}

		req.on('error', function (err) {
			reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
			finalize();
		});

		req.on('response', function (res) {
			clearTimeout(reqTimeout);

			const headers = createHeadersLenient(res.headers);

			// HTTP fetch step 5
			if (fetch.isRedirect(res.statusCode)) {
				// HTTP fetch step 5.2
				const location = headers.get('Location');

				// HTTP fetch step 5.3
				const locationURL = location === null ? null : resolve_url(request.url, location);

				// HTTP fetch step 5.5
				switch (request.redirect) {
					case 'error':
						reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
						finalize();
						return;
					case 'manual':
						// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
						if (locationURL !== null) {
							// handle corrupted header
							try {
								headers.set('Location', locationURL);
							} catch (err) {
								// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
								reject(err);
							}
						}
						break;
					case 'follow':
						// HTTP-redirect fetch step 2
						if (locationURL === null) {
							break;
						}

						// HTTP-redirect fetch step 5
						if (request.counter >= request.follow) {
							reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 6 (counter increment)
						// Create a new Request object.
						const requestOpts = {
							headers: new Headers(request.headers),
							follow: request.follow,
							counter: request.counter + 1,
							agent: request.agent,
							compress: request.compress,
							method: request.method,
							body: request.body,
							signal: request.signal,
							timeout: request.timeout,
							size: request.size
						};

						// HTTP-redirect fetch step 9
						if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
							reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 11
						if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
							requestOpts.method = 'GET';
							requestOpts.body = undefined;
							requestOpts.headers.delete('content-length');
						}

						// HTTP-redirect fetch step 15
						resolve(fetch(new Request(locationURL, requestOpts)));
						finalize();
						return;
				}
			}

			// prepare response
			res.once('end', function () {
				if (signal) signal.removeEventListener('abort', abortAndFinalize);
			});
			let body = res.pipe(new PassThrough$1());

			const response_options = {
				url: request.url,
				status: res.statusCode,
				statusText: res.statusMessage,
				headers: headers,
				size: request.size,
				timeout: request.timeout,
				counter: request.counter
			};

			// HTTP-network fetch step 12.1.1.3
			const codings = headers.get('Content-Encoding');

			// HTTP-network fetch step 12.1.1.4: handle content codings

			// in following scenarios we ignore compression support
			// 1. compression support is disabled
			// 2. HEAD request
			// 3. no Content-Encoding header
			// 4. no content response (204)
			// 5. content not modified response (304)
			if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// For Node v6+
			// Be less strict when decoding compressed responses, since sometimes
			// servers send slightly invalid responses that are still accepted
			// by common browsers.
			// Always using Z_SYNC_FLUSH is what cURL does.
			const zlibOptions = {
				flush: zlib.Z_SYNC_FLUSH,
				finishFlush: zlib.Z_SYNC_FLUSH
			};

			// for gzip
			if (codings == 'gzip' || codings == 'x-gzip') {
				body = body.pipe(zlib.createGunzip(zlibOptions));
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// for deflate
			if (codings == 'deflate' || codings == 'x-deflate') {
				// handle the infamous raw deflate response from old servers
				// a hack for old IIS and Apache servers
				const raw = res.pipe(new PassThrough$1());
				raw.once('data', function (chunk) {
					// see http://stackoverflow.com/questions/37519828
					if ((chunk[0] & 0x0F) === 0x08) {
						body = body.pipe(zlib.createInflate());
					} else {
						body = body.pipe(zlib.createInflateRaw());
					}
					response = new Response(body, response_options);
					resolve(response);
				});
				return;
			}

			// for br
			if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
				body = body.pipe(zlib.createBrotliDecompress());
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// otherwise, use response as-is
			response = new Response(body, response_options);
			resolve(response);
		});

		writeToStream(req, request);
	});
}
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean
 */
fetch.isRedirect = function (code) {
	// the five HTTP redirect status codes
	return [301, 302, 303, 307, 308].includes(code);
};
// expose Promise
fetch . Promise = global . Promise ;
module . exports = exports = fetch ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . default = exports ;
exports . Headers = Headers ;
exports . Request = Request ;
exports . Response = Response ;
exports . FetchError = FetchError ;
/***/ } ) ,
/***/ 492 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// wrappy preserves the wrapped function's name/properties on the export.
var wrappy = __webpack_require__(940)
// in-flight callback lists keyed by request key (prototype-less dictionary).
var reqs = Object.create(null)
var once = __webpack_require__(223)

module.exports = wrappy(inflight)
// Register `cb` for `key`. The first caller for a key receives the resolver
// built by makeres(); concurrent callers for the same key get null and are
// simply queued to be notified when the first call resolves.
function inflight (key, cb) {
  var pending = reqs[key]
  if (pending) {
    pending.push(cb)
    return null
  }
  reqs[key] = [cb]
  return makeres(key)
}
// Build the resolver for `key`: a once-wrapped function that fans its
// arguments out to every callback registered while the request was in flight.
// (A stray VCS timestamp line spliced into this body was removed.)
function makeres (key) {
  return once(function RES () {
    var cbs = reqs[key]
    var len = cbs.length
    var args = slice(arguments)

    // XXX It's somewhat ambiguous whether a new callback added in this
    // pass should be queued for later execution if something in the
    // list of callbacks throws, or if it should just be discarded.
    // However, it's such an edge case that it hardly matters, and either
    // choice is likely as surprising as the other.
    // As it happens, we do go ahead and schedule it for later execution.
    try {
      for (var i = 0; i < len; i++) {
        cbs[i].apply(null, args)
      }
    } finally {
      if (cbs.length > len) {
        // added more in the interim.
        // de-zalgo, just in case, but don't call again.
        cbs.splice(0, len)
        process.nextTick(function () {
          RES.apply(null, args)
        })
      } else {
        delete reqs[key]
      }
    }
  })
}
// Copy an array-like (typically the `arguments` object) into a real Array.
// (Stray VCS timestamp lines spliced around and inside this function were removed.)
function slice (args) {
  var length = args.length
  var array = []

  for (var i = 0; i < length; i++) array[i] = args[i]
  return array
}
/***/ } ) ,
/***/ 514 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
// TypeScript's __importStar helper: gives `import * as ns` semantics over a
// CommonJS export — real ES modules pass through untouched, anything else is
// shallow-copied (own enumerable keys) with the original attached as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
// Flag the module as a transpiled ES module for interop checks.
Object.defineProperty(exports, "__esModule", { value: true });
// toolrunner module: provides argStringToArray and ToolRunner used below.
const tr = __importStar(__webpack_require__(159));
/**
 * Exec a command.
 * Output will be streamed to the live console.
 * Returns promise with return code
 *
 * @param commandLine command to execute (can include additional args). Must be correctly escaped.
 * @param args optional arguments for tool. Escaping is handled by the lib.
 * @param options optional exec options. See ExecOptions
 * @returns Promise<number> exit code
 */
function exec(commandLine, args, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const parsed = tr.argStringToArray(commandLine);
        if (parsed.length === 0) {
            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
        }
        // Path to tool to execute should be first arg
        const toolPath = parsed[0];
        const toolArgs = parsed.slice(1).concat(args || []);
        const runner = new tr.ToolRunner(toolPath, toolArgs, options);
        return runner.exec();
    });
}
exports.exec = exec;
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 517 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
/ * !
* Tmp
*
* Copyright ( c ) 2011 - 2017 KARASZI Istvan < github @ spam . raszi . hu >
*
* MIT Licensed
* /
/*
 * Module dependencies.
 * (Stray VCS timestamp lines spliced around the crypto require were removed.)
 */
const fs = __webpack_require__(747);
const os = __webpack_require__(87);
const path = __webpack_require__(622);
const crypto = __webpack_require__(373);

const _c = { fs: fs.constants, os: os.constants };
const rimraf = __webpack_require__(959);
/*
 * The working inner variables.
 */
const
  // the random characters to choose from
  RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',

  // templates replace six consecutive X characters with random ones
  TEMPLATE_PATTERN = /XXXXXX/,

  DEFAULT_TRIES = 3,

  // create + exclusive + read/write; constants may live directly on _c or under fs.constants
  CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),

  // constants are off on the windows platform and will not match the actual errno codes
  IS_WIN32 = os.platform() === 'win32',
  EBADF = _c.EBADF || _c.os.errno.EBADF,
  ENOENT = _c.ENOENT || _c.os.errno.ENOENT,

  DIR_MODE = 0o700 /* 448 */,
  FILE_MODE = 0o600 /* 384 */,

  EXIT = 'exit',

  // this will hold the objects need to be removed on exit
  _removeObjects = [],

  // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
  FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
  FN_RIMRAF_SYNC = rimraf.sync;

let
  // set by graceful-cleanup opt-in; gates the exit-time garbage collector
  _gracefulCleanup = false;
/**
 * Gets a temporary file name.
 *
 * @param {(Options|tmpNameCallback)} options options or callback
 * @param {?tmpNameCallback} callback the callback function
 */
function tmpName(options, callback) {
  const [opts, cb] = _parseArguments(options, callback);

  try {
    _assertAndSanitizeOptions(opts);
  } catch (err) {
    return cb(err);
  }

  let tries = opts.tries;
  (function _getUniqueName() {
    try {
      const name = _generateTmpName(opts);

      // check whether the path exists then retry if needed
      fs.stat(name, function (err) {
        /* istanbul ignore else */
        if (!err) {
          // name is taken — retry until the budget is exhausted
          /* istanbul ignore else */
          if (tries-- > 0) return _getUniqueName();

          return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
        }

        // stat failed, so the name is free
        cb(null, name);
      });
    } catch (err) {
      cb(err);
    }
  }());
}
/**
 * Synchronous version of tmpName.
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or could not generate a filename
 */
function tmpNameSync(options) {
  const [opts] = _parseArguments(options);
  _assertAndSanitizeOptions(opts);

  let tries = opts.tries;
  do {
    const name = _generateTmpName(opts);
    try {
      fs.statSync(name);
    } catch (e) {
      // stat failed, so the name is unused — claim it
      return name;
    }
  } while (tries-- > 0);

  throw new Error('Could not get a unique tmp filename, max tries reached');
}
/**
 * Creates and opens a temporary file.
 *
 * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
 * @param {?fileCallback} callback
 */
function file(options, callback) {
  const [opts, cb] = _parseArguments(options, callback);

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create and open the file
    fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
      /* istanbul ignore else */
      if (err) return cb(err);

      if (opts.discardDescriptor) {
        return fs.close(fd, function _discardCallback(possibleErr) {
          // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only
          return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
        });
      }

      // detachDescriptor passes the descriptor whereas discardDescriptor closes it,
      // either way we no longer care about the descriptor
      const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
      cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
    });
  });
}
/**
 * Synchronous version of file.
 *
 * @param {Options} options
 * @returns {FileSyncObject} object consists of name, fd and removeCallback
 * @throws {Error} if cannot create a file
 */
function fileSync(options) {
  const [opts] = _parseArguments(options);

  const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
  const name = tmpNameSync(opts);
  let fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);

  /* istanbul ignore else */
  if (opts.discardDescriptor) {
    fs.closeSync(fd);
    fd = undefined;
  }

  return {
    name: name,
    fd: fd,
    removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
  };
}
/**
 * Creates a temporary directory.
 *
 * @param {(Options|dirCallback)} options the options or the callback function
 * @param {?dirCallback} callback
 */
function dir(options, callback) {
  const [opts, cb] = _parseArguments(options, callback);

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create the directory
    fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
      /* istanbul ignore else */
      if (err) return cb(err);

      cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
    });
  });
}
/**
 * Synchronous version of dir.
 *
 * @param {Options} options
 * @returns {DirSyncObject} object consists of name and removeCallback
 * @throws {Error} if it cannot create a directory
 */
function dirSync(options) {
  const [opts] = _parseArguments(options);

  const name = tmpNameSync(opts);
  fs.mkdirSync(name, opts.mode || DIR_MODE);

  return {
    name: name,
    removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
  };
}
/**
 * Removes files asynchronously.
 *
 * @param {Object} fdPath [fd, path] pair; fd < 0 means "no open descriptor"
 * @param {Function} next
 * @private
 */
function _removeFileAsync(fdPath, next) {
  const done = function (err) {
    if (err && !_isENOENT(err)) {
      // reraise any unanticipated error
      return next(err);
    }
    next();
  };

  if (fdPath[0] >= 0) {
    // close the descriptor first, then unlink
    fs.close(fdPath[0], function () {
      fs.unlink(fdPath[1], done);
    });
  } else {
    fs.unlink(fdPath[1], done);
  }
}
/**
 * Removes files synchronously.
 *
 * @param {Object} fdPath [fd, path] pair; fd < 0 means "no open descriptor"
 * @private
 */
function _removeFileSync(fdPath) {
  let deferredError = null;
  try {
    if (fdPath[0] >= 0) fs.closeSync(fdPath[0]);
  } catch (e) {
    // reraise any unanticipated error
    if (!_isEBADF(e) && !_isENOENT(e)) throw e;
  } finally {
    // always attempt the unlink, even if close failed
    try {
      fs.unlinkSync(fdPath[1]);
    } catch (e) {
      // reraise any unanticipated error
      if (!_isENOENT(e)) deferredError = e;
    }
  }
  if (deferredError !== null) {
    throw deferredError;
  }
}
/**
 * Prepares the callback for removal of the temporary file.
 *
 * Returns either a sync callback or an async callback depending on whether
 * fileSync or file was called, which is expressed by the sync parameter.
 *
 * @param {string} name the path of the file
 * @param {number} fd file descriptor
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {fileCallback | fileCallbackSync}
 * @private
 */
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
  const syncRemover = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
  const asyncRemover = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, syncRemover);

  // only the sync variant goes into the exit-time registry
  if (!opts.keep) _removeObjects.unshift(syncRemover);

  return sync ? syncRemover : asyncRemover;
}
/**
 * Prepares the callback for removal of the temporary directory.
 *
 * Returns either a sync callback or an async callback depending on whether
 * tmpFileSync or tmpFile was called, which is expressed by the sync parameter.
 *
 * @param {string} name
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {Function} the callback
 * @private
 */
function _prepareTmpDirRemoveCallback(name, opts, sync) {
  // unsafeCleanup removes non-empty directories recursively via rimraf
  const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
  const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
  const syncRemover = _prepareRemoveCallback(removeFunctionSync, name, sync);
  const asyncRemover = _prepareRemoveCallback(removeFunction, name, sync, syncRemover);

  // only the sync variant goes into the exit-time registry
  if (!opts.keep) _removeObjects.unshift(syncRemover);

  return sync ? syncRemover : asyncRemover;
}
/**
 * Creates a guarded function wrapping the removeFunction call.
 *
 * The cleanup callback is save to be called multiple times.
 * Subsequent invocations will be ignored.
 *
 * @param {Function} removeFunction
 * @param {string} fileOrDirName
 * @param {boolean} sync
 * @param {cleanupCallbackSync?} cleanupCallbackSync
 * @returns {cleanupCallback | cleanupCallbackSync}
 * @private
 */
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
  let called = false;

  // if sync is true, the next parameter will be ignored
  return function _cleanupCallback(next) {
    /* istanbul ignore else */
    if (!called) {
      // remove cleanupCallback from cache
      // (the registry holds the sync variant when one was provided)
      const toRemove = cleanupCallbackSync || _cleanupCallback;
      const index = _removeObjects.indexOf(toRemove);
      /* istanbul ignore else */
      if (index >= 0) _removeObjects.splice(index, 1);

      called = true;
      // known-sync remove functions take no completion callback
      if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
        return removeFunction(fileOrDirName);
      } else {
        return removeFunction(fileOrDirName, next || function() {});
      }
    }
  };
}
/**
 * The garbage collector.
 *
 * Drains the registry of leftover temporary objects, but only when graceful
 * cleanup was requested. Presumably invoked from an EXIT handler registered
 * elsewhere in this module — registration is not visible in this chunk.
 *
 * @private
 */
function _garbageCollector() {
  /* istanbul ignore else */
  if (!_gracefulCleanup) return;

  // the function being called removes itself from _removeObjects,
  // loop until _removeObjects is empty
  while (_removeObjects.length) {
    try {
      _removeObjects[0]();
    } catch (e) {
      // already removed?
    }
  }
}
/**
 * Random name generator based on crypto.
 * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
 *
 * @param {number} howMany
 * @returns {string} the generated random name
 * @private
 */
function _randomChars(howMany) {
  let bytes = null;

  // make sure that we do not fail because we ran out of entropy
  try {
    bytes = crypto.randomBytes(howMany);
  } catch (e) {
    bytes = crypto.pseudoRandomBytes(howMany);
  }

  const chars = [];
  for (let i = 0; i < howMany; i++) {
    chars.push(RANDOM_CHARS[bytes[i] % RANDOM_CHARS.length]);
  }
  return chars.join('');
}
/**
 * Helper which determines whether a string s is blank, that is undefined, or empty or null.
 *
 * @private
 * @param {string} s
 * @returns {Boolean} true whether the string s is blank, false otherwise
 */
function _isBlank(s) {
  // `s == null` matches both null and undefined before .trim() is touched
  return s == null || !s.trim();
}
/**
 * Checks whether the `obj` parameter is defined or not.
 *
 * @param {Object} obj
 * @returns {boolean} true if the object is undefined
 * @private
 */
function _isUndefined(obj) {
  return obj === undefined;
}
/**
 * Parses the function arguments.
 *
 * This function helps to have optional arguments.
 *
 * @param {(Options|null|undefined|Function)} options
 * @param {?Function} callback
 * @returns {Array} parsed arguments
 * @private
 */
function _parseArguments(options, callback) {
  // fn(cb) form: no options supplied
  if (typeof options === 'function') {
    return [{}, options];
  }

  if (_isUndefined(options)) {
    return [{}, callback];
  }

  // copy options so we do not leak the changes we make internally
  const actualOptions = {};
  for (const key of Object.getOwnPropertyNames(options)) {
    actualOptions[key] = options[key];
  }
  return [actualOptions, callback];
}
/**
 * Generates a new temporary name.
 *
 * Precedence: an explicit `name` wins, then a `template` (its placeholder
 * replaced with random characters), otherwise a name is assembled from
 * prefix, pid, random characters and postfix.
 *
 * @param {Object} opts
 * @returns {string} the new random name according to opts
 * @private
 */
function _generateTmpName(opts) {
  const tmpDir = opts.tmpdir;

  /* istanbul ignore else */
  if (!_isUndefined(opts.name))
    return path.join(tmpDir, opts.dir, opts.name);

  /* istanbul ignore else */
  if (!_isUndefined(opts.template))
    return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));

  // neither name nor template given: prefix-pid-random[-postfix]
  const name = `${opts.prefix ? opts.prefix : 'tmp'}-${process.pid}-${_randomChars(12)}${opts.postfix ? '-' + opts.postfix : ''}`;
  return path.join(tmpDir, opts.dir, name);
}
/**
 * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
 * options.
 *
 * Mutates `options` in place; `options.tmpdir` is resolved first because all
 * subsequent relative-path checks are made against it.
 *
 * @param {Options} options
 * @private
 */
function _assertAndSanitizeOptions(options) {
  options.tmpdir = _getTmpDir(options);
  const tmpDir = options.tmpdir;

  /* istanbul ignore else */
  if (!_isUndefined(options.name))
    _assertIsRelative(options.name, 'name', tmpDir);
  /* istanbul ignore else */
  if (!_isUndefined(options.dir))
    _assertIsRelative(options.dir, 'dir', tmpDir);
  /* istanbul ignore else */
  if (!_isUndefined(options.template)) {
    _assertIsRelative(options.template, 'template', tmpDir);
    if (!options.template.match(TEMPLATE_PATTERN))
      throw new Error(`Invalid template, found "${options.template}".`);
  }
  /* istanbul ignore else */
  if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
    throw new Error(`Invalid tries, found "${options.tries}".`);

  // if a name was specified we will try once
  options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
  options.keep = !!options.keep;
  options.detachDescriptor = !!options.detachDescriptor;
  options.discardDescriptor = !!options.discardDescriptor;
  options.unsafeCleanup = !!options.unsafeCleanup;

  // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
  options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
  // sanitize further if template is relative to options.dir
  options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);

  // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
  options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
  options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
}
/**
 * Resolve the specified path name in respect to tmpDir.
 *
 * The specified name might include relative path components, e.g. ../
 * so we need to resolve in order to be sure that is is located inside tmpDir
 *
 * @param name
 * @param tmpDir
 * @returns {string}
 * @private
 */
function _resolvePath(name, tmpDir) {
  const sanitizedName = _sanitizeName(name);
  // names already rooted under tmpDir resolve as-is; everything else is
  // interpreted relative to tmpDir
  return sanitizedName.startsWith(tmpDir)
    ? path.resolve(sanitizedName)
    : path.resolve(path.join(tmpDir, sanitizedName));
}
/**
 * Sanitize the specified path name by removing all quote characters.
 *
 * @param name
 * @returns {string}
 * @private
 */
function _sanitizeName(name) {
  // blank values (null, undefined, empty/whitespace-only) pass through untouched
  if (name == null || !name.trim()) {
    return name;
  }
  return name.replace(/["']/g, '');
}
/**
 * Asserts whether specified name is relative to the specified tmpDir.
 *
 * @param {string} name
 * @param {string} option
 * @param {string} tmpDir
 * @throws {Error}
 * @private
 */
function _assertIsRelative(name, option, tmpDir) {
  if (option === 'name') {
    // assert that name is not absolute and does not contain a path
    if (path.isAbsolute(name))
      throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
    // must not fail on valid .<name> or ..<name> or similar such constructs
    const basename = path.basename(name);
    if (basename === '..' || basename === '.' || basename !== name)
      throw new Error(`${option} option must not contain a path, found "${name}".`);
  } else { // if (option === 'dir' || option === 'template') {
    // assert that dir or template are relative to tmpDir
    if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
      throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
    }
    const resolvedPath = _resolvePath(name, tmpDir);
    if (!resolvedPath.startsWith(tmpDir))
      throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
  }
}
/**
 * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
 *
 * @param {SystemError} error the error to inspect
 * @returns {boolean} true if the error denotes a bad file descriptor (EBADF)
 * @private
 */
function _isEBADF ( error ) {
return _isExpectedError ( error , - EBADF , 'EBADF' ) ;
}
/**
 * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
 *
 * @param {SystemError} error the error to inspect
 * @returns {boolean} true if the error denotes a missing file or directory (ENOENT)
 * @private
 */
function _isENOENT ( error ) {
return _isExpectedError ( error , - ENOENT , 'ENOENT' ) ;
}
/**
 * Helper to determine whether the expected error code matches the actual code and errno,
 * which will differ between the supported node versions.
 *
 * - Node >= 7.0:
 *   error.code {string}
 *   error.errno {number} any numerical value will be negated
 *
 * CAVEAT
 *
 * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT
 * is no different here.
 *
 * @param {SystemError} error
 * @param {number} errno
 * @param {string} code
 * @private
 */
function _isExpectedError(error, errno, code) {
  // on win32 the numeric errno values diverge from os.constants, so only the
  // string code can be compared there
  if (IS_WIN32) {
    return error.code === code;
  }
  return error.code === code && error.errno === errno;
}
/**
 * Sets the graceful cleanup.
 *
 * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
 * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
 * object removals.
 */
function setGracefulCleanup ( ) {
// flag is read by _garbageCollector when the process exit listener fires
_gracefulCleanup = true ;
}
/**
 * Returns the currently configured tmp dir from os.tmpdir().
 *
 * @private
 * @param {?Options} options
 * @returns {string} the currently configured tmp dir
 */
function _getTmpDir(options) {
  // an explicit options.tmpdir overrides the os default; quote characters
  // are stripped before resolving to an absolute path
  return path.resolve(_sanitizeName((options && options.tmpdir) || os.tmpdir()));
}
// Install process exit listener
// NOTE(review): EXIT presumably holds the 'exit' event name (declared earlier
// in this module, not visible here) — on process exit _garbageCollector runs
// synchronously and removes all tracked temporary objects when graceful
// cleanup has been enabled.
process . addListener ( EXIT , _garbageCollector ) ;
/ * *
* Configuration options .
*
* @ typedef { Object } Options
* @ property { ? boolean } keep the temporary object ( file or dir ) will not be garbage collected
* @ property { ? number } tries the number of tries before give up the name generation
* @ property { ? int } mode the access mode , defaults are 0o700 for directories and 0o600 for files
* @ property { ? string } template the "mkstemp" like filename template
* @ property { ? string } name fixed name relative to tmpdir or the specified dir option
* @ property { ? string } dir tmp directory relative to the root tmp directory in use
* @ property { ? string } prefix prefix for the generated name
* @ property { ? string } postfix postfix for the generated name
* @ property { ? string } tmpdir the root tmp directory which overrides the os tmpdir
* @ property { ? boolean } unsafeCleanup recursively removes the created temporary directory , even when it ' s not empty
* @ property { ? boolean } detachDescriptor detaches the file descriptor , caller is responsible for closing the file , tmp will no longer try closing the file during garbage collection
* @ property { ? boolean } discardDescriptor discards the file descriptor ( closes file , fd is - 1 ) , tmp will no longer try closing the file during garbage collection
* /
/ * *
* @ typedef { Object } FileSyncObject
* @ property { string } name the name of the file
* @ property { string } fd the file descriptor or - 1 if the fd has been discarded
* @ property { fileCallback } removeCallback the callback function to remove the file
* /
/ * *
* @ typedef { Object } DirSyncObject
* @ property { string } name the name of the directory
* @ property { fileCallback } removeCallback the callback function to remove the directory
* /
/ * *
* @ callback tmpNameCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* /
/ * *
* @ callback fileCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback fileCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* @ callback dirCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback dirCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallback
* @ param { simpleCallback } [ next ] function to call whenever the tmp object needs to be removed
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallbackSync
* /
/ * *
* Callback function for function composition .
* @ see { @ link https : //github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @ callback simpleCallback
* /
// exporting all the needed methods
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object . defineProperty ( module . exports , 'tmpdir' , {
enumerable : true ,
configurable : false ,
get : function ( ) {
return _getTmpDir ( ) ;
}
} ) ;
// asynchronous and synchronous creation of temporary directories and files
module . exports . dir = dir ;
module . exports . dirSync = dirSync ;
module . exports . file = file ;
module . exports . fileSync = fileSync ;
// name generation only — no filesystem object is created
module . exports . tmpName = tmpName ;
module . exports . tmpNameSync = tmpNameSync ;
module . exports . setGracefulCleanup = setGracefulCleanup ;
/***/ } ) ,
/***/ 520 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const lte = ( a , b , loose ) => compare ( a , b , loose ) <= 0
module . exports = lte
/***/ } ) ,
/***/ 522 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const gte = ( a , b , loose ) => compare ( a , b , loose ) >= 0
module . exports = gte
/***/ } ) ,
/***/ 523 :
/***/ ( function ( module , exports , _ _webpack _require _ _ ) {
const { MAX _SAFE _COMPONENT _LENGTH } = _ _webpack _require _ _ ( 293 )
const debug = _ _webpack _require _ _ ( 427 )
exports = module . exports = { }
// The actual regexps go on exports.re
const re = exports . re = [ ]
const src = exports . src = [ ]
const t = exports . t = { }
let R = 0
const createToken = ( name , value , isGlobal ) => {
const index = R ++
debug ( index , value )
t [ name ] = index
src [ index ] = value
re [ index ] = new RegExp ( value , isGlobal ? 'g' : undefined )
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
createToken ( 'NUMERICIDENTIFIER' , '0|[1-9]\\d*' )
createToken ( 'NUMERICIDENTIFIERLOOSE' , '[0-9]+' )
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
createToken ( 'NONNUMERICIDENTIFIER' , '\\d*[a-zA-Z-][a-zA-Z0-9-]*' )
// ## Main Version
// Three dot-separated numeric identifiers.
createToken ( 'MAINVERSION' , ` ( ${ src [ t . NUMERICIDENTIFIER ] } ) \\ . ` +
` ( ${ src [ t . NUMERICIDENTIFIER ] } ) \\ . ` +
` ( ${ src [ t . NUMERICIDENTIFIER ] } ) ` )
createToken ( 'MAINVERSIONLOOSE' , ` ( ${ src [ t . NUMERICIDENTIFIERLOOSE ] } ) \\ . ` +
` ( ${ src [ t . NUMERICIDENTIFIERLOOSE ] } ) \\ . ` +
` ( ${ src [ t . NUMERICIDENTIFIERLOOSE ] } ) ` )
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
createToken ( 'PRERELEASEIDENTIFIER' , ` (?: ${ src [ t . NUMERICIDENTIFIER ]
} | $ { src [ t . NONNUMERICIDENTIFIER ] } ) ` )
createToken ( 'PRERELEASEIDENTIFIERLOOSE' , ` (?: ${ src [ t . NUMERICIDENTIFIERLOOSE ]
} | $ { src [ t . NONNUMERICIDENTIFIER ] } ) ` )
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
createToken ( 'PRERELEASE' , ` (?:-( ${ src [ t . PRERELEASEIDENTIFIER ]
} ( ? : \ \ . $ { src [ t . PRERELEASEIDENTIFIER ] } ) * ) ) ` )
createToken ( 'PRERELEASELOOSE' , ` (?:-?( ${ src [ t . PRERELEASEIDENTIFIERLOOSE ]
} ( ? : \ \ . $ { src [ t . PRERELEASEIDENTIFIERLOOSE ] } ) * ) ) ` )
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
createToken ( 'BUILDIDENTIFIER' , '[0-9A-Za-z-]+' )
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
createToken ( 'BUILD' , ` (?: \\ +( ${ src [ t . BUILDIDENTIFIER ]
} ( ? : \ \ . $ { src [ t . BUILDIDENTIFIER ] } ) * ) ) ` )
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
createToken ( 'FULLPLAIN' , ` v? ${ src [ t . MAINVERSION ]
} $ { src [ t . PRERELEASE ] } ? $ {
src [ t . BUILD ] } ? ` )
createToken ( 'FULL' , ` ^ ${ src [ t . FULLPLAIN ] } $ ` )
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
createToken ( 'LOOSEPLAIN' , ` [v= \\ s]* ${ src [ t . MAINVERSIONLOOSE ]
} $ { src [ t . PRERELEASELOOSE ] } ? $ {
src [ t . BUILD ] } ? ` )
createToken ( 'LOOSE' , ` ^ ${ src [ t . LOOSEPLAIN ] } $ ` )
createToken ( 'GTLT' , '((?:<|>)?=?)' )
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
createToken ( 'XRANGEIDENTIFIERLOOSE' , ` ${ src [ t . NUMERICIDENTIFIERLOOSE ] } |x|X| \\ * ` )
createToken ( 'XRANGEIDENTIFIER' , ` ${ src [ t . NUMERICIDENTIFIER ] } |x|X| \\ * ` )
createToken ( 'XRANGEPLAIN' , ` [v= \\ s]*( ${ src [ t . XRANGEIDENTIFIER ] } ) ` +
` (?: \\ .( ${ src [ t . XRANGEIDENTIFIER ] } ) ` +
` (?: \\ .( ${ src [ t . XRANGEIDENTIFIER ] } ) ` +
` (?: ${ src [ t . PRERELEASE ] } )? ${
src [ t . BUILD ] } ? ` +
` )?)? ` )
createToken ( 'XRANGEPLAINLOOSE' , ` [v= \\ s]*( ${ src [ t . XRANGEIDENTIFIERLOOSE ] } ) ` +
` (?: \\ .( ${ src [ t . XRANGEIDENTIFIERLOOSE ] } ) ` +
` (?: \\ .( ${ src [ t . XRANGEIDENTIFIERLOOSE ] } ) ` +
` (?: ${ src [ t . PRERELEASELOOSE ] } )? ${
src [ t . BUILD ] } ? ` +
` )?)? ` )
createToken ( 'XRANGE' , ` ^ ${ src [ t . GTLT ] } \\ s* ${ src [ t . XRANGEPLAIN ] } $ ` )
createToken ( 'XRANGELOOSE' , ` ^ ${ src [ t . GTLT ] } \\ s* ${ src [ t . XRANGEPLAINLOOSE ] } $ ` )
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken ( 'COERCE' , ` ${ '(^|[^\\d])' +
'(\\d{1,' } $ { MAX _SAFE _COMPONENT _LENGTH } } ) ` +
` (?: \\ .( \\ d{1, ${ MAX _SAFE _COMPONENT _LENGTH } }))? ` +
` (?: \\ .( \\ d{1, ${ MAX _SAFE _COMPONENT _LENGTH } }))? ` +
` (?: $ |[^ \\ d]) ` )
createToken ( 'COERCERTL' , src [ t . COERCE ] , true )
// Tilde ranges.
// Meaning is "reasonably at or greater than"
createToken ( 'LONETILDE' , '(?:~>?)' )
createToken ( 'TILDETRIM' , ` ( \\ s*) ${ src [ t . LONETILDE ] } \\ s+ ` , true )
exports . tildeTrimReplace = '$1~'
createToken ( 'TILDE' , ` ^ ${ src [ t . LONETILDE ] } ${ src [ t . XRANGEPLAIN ] } $ ` )
createToken ( 'TILDELOOSE' , ` ^ ${ src [ t . LONETILDE ] } ${ src [ t . XRANGEPLAINLOOSE ] } $ ` )
// Caret ranges.
// Meaning is "at least and backwards compatible with"
createToken ( 'LONECARET' , '(?:\\^)' )
createToken ( 'CARETTRIM' , ` ( \\ s*) ${ src [ t . LONECARET ] } \\ s+ ` , true )
exports . caretTrimReplace = '$1^'
createToken ( 'CARET' , ` ^ ${ src [ t . LONECARET ] } ${ src [ t . XRANGEPLAIN ] } $ ` )
createToken ( 'CARETLOOSE' , ` ^ ${ src [ t . LONECARET ] } ${ src [ t . XRANGEPLAINLOOSE ] } $ ` )
// A simple gt/lt/eq thing, or just "" to indicate "any version"
createToken ( 'COMPARATORLOOSE' , ` ^ ${ src [ t . GTLT ] } \\ s*( ${ src [ t . LOOSEPLAIN ] } ) $ |^ $ ` )
createToken ( 'COMPARATOR' , ` ^ ${ src [ t . GTLT ] } \\ s*( ${ src [ t . FULLPLAIN ] } ) $ |^ $ ` )
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
createToken ( 'COMPARATORTRIM' , ` ( \\ s*) ${ src [ t . GTLT ]
} \ \ s * ( $ { src [ t . LOOSEPLAIN ] } | $ { src [ t . XRANGEPLAIN ] } ) ` , true)
exports . comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
createToken ( 'HYPHENRANGE' , ` ^ \\ s*( ${ src [ t . XRANGEPLAIN ] } ) ` +
` \\ s+- \\ s+ ` +
` ( ${ src [ t . XRANGEPLAIN ] } ) ` +
` \\ s* $ ` )
createToken ( 'HYPHENRANGELOOSE' , ` ^ \\ s*( ${ src [ t . XRANGEPLAINLOOSE ] } ) ` +
` \\ s+- \\ s+ ` +
` ( ${ src [ t . XRANGEPLAINLOOSE ] } ) ` +
` \\ s* $ ` )
// Star ranges basically just allow anything at all.
createToken ( 'STAR' , '(<|>)?=?\\s*\\*' )
// >=0.0.0 is like a star
createToken ( 'GTE0' , '^\\s*>=\\s*0\.0\.0\\s*$' )
createToken ( 'GTE0PRE' , '^\\s*>=\\s*0\.0\.0-0\\s*$' )
/***/ } ) ,
/***/ 532 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const ANY = Symbol ( 'SemVer ANY' )
// hoisted class for cyclic dependency
class Comparator {
static get ANY ( ) {
return ANY
}
constructor ( comp , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( comp instanceof Comparator ) {
if ( comp . loose === ! ! options . loose ) {
return comp
} else {
comp = comp . value
}
}
debug ( 'comparator' , comp , options )
this . options = options
this . loose = ! ! options . loose
this . parse ( comp )
if ( this . semver === ANY ) {
this . value = ''
} else {
this . value = this . operator + this . semver . version
}
debug ( 'comp' , this )
}
parse ( comp ) {
const r = this . options . loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
const m = comp . match ( r )
if ( ! m ) {
throw new TypeError ( ` Invalid comparator: ${ comp } ` )
}
this . operator = m [ 1 ] !== undefined ? m [ 1 ] : ''
if ( this . operator === '=' ) {
this . operator = ''
}
// if it literally is just '>' or '' then allow anything.
if ( ! m [ 2 ] ) {
this . semver = ANY
} else {
this . semver = new SemVer ( m [ 2 ] , this . options . loose )
}
}
toString ( ) {
return this . value
}
test ( version ) {
debug ( 'Comparator.test' , version , this . options . loose )
if ( this . semver === ANY || version === ANY ) {
return true
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
return cmp ( version , this . operator , this . semver , this . options )
}
intersects ( comp , options ) {
if ( ! ( comp instanceof Comparator ) ) {
throw new TypeError ( 'a Comparator is required' )
}
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( this . operator === '' ) {
if ( this . value === '' ) {
return true
}
return new Range ( comp . value , options ) . test ( this . value )
} else if ( comp . operator === '' ) {
if ( comp . value === '' ) {
return true
}
return new Range ( this . value , options ) . test ( comp . semver )
}
const sameDirectionIncreasing =
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '>=' || comp . operator === '>' )
const sameDirectionDecreasing =
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '<=' || comp . operator === '<' )
const sameSemVer = this . semver . version === comp . semver . version
const differentDirectionsInclusive =
( this . operator === '>=' || this . operator === '<=' ) &&
( comp . operator === '>=' || comp . operator === '<=' )
const oppositeDirectionsLessThan =
cmp ( this . semver , '<' , comp . semver , options ) &&
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '<=' || comp . operator === '<' )
const oppositeDirectionsGreaterThan =
cmp ( this . semver , '>' , comp . semver , options ) &&
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '>=' || comp . operator === '>' )
return (
sameDirectionIncreasing ||
sameDirectionDecreasing ||
( sameSemVer && differentDirectionsInclusive ) ||
oppositeDirectionsLessThan ||
oppositeDirectionsGreaterThan
)
}
}
module . exports = Comparator
const { re , t } = _ _webpack _require _ _ ( 523 )
const cmp = _ _webpack _require _ _ ( 98 )
const debug = _ _webpack _require _ _ ( 427 )
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
/***/ } ) ,
/***/ 537 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var deprecation = _ _webpack _require _ _ ( 932 ) ;
var once = _interopDefault ( _ _webpack _require _ _ ( 223 ) ) ;
const logOnce = once ( deprecation => console . warn ( deprecation ) ) ;
/ * *
* Error with extra properties to help with debugging
* /
class RequestError extends Error {
constructor ( message , statusCode , options ) {
super ( message ) ; // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if ( Error . captureStackTrace ) {
Error . captureStackTrace ( this , this . constructor ) ;
}
this . name = "HttpError" ;
this . status = statusCode ;
Object . defineProperty ( this , "code" , {
get ( ) {
logOnce ( new deprecation . Deprecation ( "[@octokit/request-error] `error.code` is deprecated, use `error.status`." ) ) ;
return statusCode ;
}
} ) ;
this . headers = options . headers || { } ; // redact request credentials without mutating original request options
const requestCopy = Object . assign ( { } , options . request ) ;
if ( options . request . headers . authorization ) {
requestCopy . headers = Object . assign ( { } , options . request . headers , {
authorization : options . request . headers . authorization . replace ( / .*$/ , " [REDACTED]" )
} ) ;
}
requestCopy . url = requestCopy . url // client_id & client_secret can be passed as URL query parameters to increase rate limit
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
. replace ( /\bclient_secret=\w+/g , "client_secret=[REDACTED]" ) // OAuth tokens can be passed as URL query parameters, although it is not recommended
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
. replace ( /\baccess_token=\w+/g , "access_token=[REDACTED]" ) ;
this . request = requestCopy ;
}
}
exports . RequestError = RequestError ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 544 :
/***/ ( function ( module ) {
if ( typeof Object . create === 'function' ) {
// implementation from standard node.js 'util' module
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
ctor . prototype = Object . create ( superCtor . prototype , {
constructor : {
value : ctor ,
enumerable : false ,
writable : true ,
configurable : true
}
} )
}
} ;
} else {
// old school shim for old browsers
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
var TempCtor = function ( ) { }
TempCtor . prototype = superCtor . prototype
ctor . prototype = new TempCtor ( )
ctor . prototype . constructor = ctor
}
}
}
/***/ } ) ,
/***/ 549 :
/***/ ( function ( module ) {
module . exports = addHook
function addHook ( state , kind , name , hook ) {
var orig = hook
if ( ! state . registry [ name ] ) {
state . registry [ name ] = [ ]
}
if ( kind === 'before' ) {
hook = function ( method , options ) {
return Promise . resolve ( )
. then ( orig . bind ( null , options ) )
. then ( method . bind ( null , options ) )
}
}
if ( kind === 'after' ) {
hook = function ( method , options ) {
var result
return Promise . resolve ( )
. then ( method . bind ( null , options ) )
. then ( function ( result _ ) {
result = result _
return orig ( result , options )
} )
. then ( function ( ) {
return result
} )
}
}
if ( kind === 'error' ) {
hook = function ( method , options ) {
return Promise . resolve ( )
. then ( method . bind ( null , options ) )
. catch ( function ( error ) {
return orig ( error , options )
} )
}
}
state . registry [ name ] . push ( {
hook : hook ,
orig : orig
} )
}
/***/ } ) ,
/***/ 561 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// given a set of versions and a range, create a "simplified" range
// that includes the same versions that the original range does
// If the original range is shorter than the simplified one, return that.
const satisfies = _ _webpack _require _ _ ( 55 )
const compare = _ _webpack _require _ _ ( 309 )
module . exports = ( versions , range , options ) => {
const set = [ ]
let min = null
let prev = null
const v = versions . sort ( ( a , b ) => compare ( a , b , options ) )
for ( const version of v ) {
const included = satisfies ( version , range , options )
if ( included ) {
prev = version
if ( ! min )
min = version
} else {
if ( prev ) {
set . push ( [ min , prev ] )
}
prev = null
min = null
}
}
if ( min )
set . push ( [ min , null ] )
const ranges = [ ]
for ( const [ min , max ] of set ) {
if ( min === max )
ranges . push ( min )
else if ( ! max && min === v [ 0 ] )
ranges . push ( '*' )
else if ( ! max )
ranges . push ( ` >= ${ min } ` )
else if ( min === v [ 0 ] )
ranges . push ( ` <= ${ max } ` )
else
ranges . push ( ` ${ min } - ${ max } ` )
}
const simplified = ranges . join ( ' || ' )
const original = typeof range . raw === 'string' ? range . raw : String ( range )
return simplified . length < original . length ? simplified : range
}
/***/ } ) ,
/***/ 579 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
const maxSatisfying = ( versions , range , options ) => {
let max = null
let maxSV = null
let rangeObj = null
try {
rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( ( v ) => {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! max || maxSV . compare ( v ) === - 1 ) {
// compare(max, v, true)
max = v
maxSV = new SemVer ( max , options )
}
}
} )
return max
}
module . exports = maxSatisfying
/***/ } ) ,
/***/ 601 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = _ _webpack _require _ _ ( 925 )
const valid = ( version , options ) => {
const v = parse ( version , options )
return v ? v . version : null
}
module . exports = valid
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
// re-export of the built-in node.js 'http' module
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
// re-export of the built-in node.js 'events' module
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
// re-export of the built-in node.js 'path' module
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 625 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
// public surface of glob's shared helpers (glob/common.js)
exports . alphasort = alphasort
exports . alphasorti = alphasorti
exports . setopts = setopts
exports . ownProp = ownProp
exports . makeAbs = makeAbs
exports . finish = finish
exports . mark = mark
exports . isIgnored = isIgnored
exports . childrenIgnored = childrenIgnored
// hasOwnProperty that is safe for objects whose prototype is null or whose
// own `hasOwnProperty` has been shadowed
function ownProp (obj, field) {
  var hasOwn = Object.prototype.hasOwnProperty
  return hasOwn.call(obj, field)
}
// module dependencies, resolved through the webpack runtime
var path = __webpack_require__(622)
var minimatch = __webpack_require__(973)
var isAbsolute = __webpack_require__(714)
var Minimatch = minimatch.Minimatch
// case-insensitive comparator for sorting glob results
function alphasorti (a, b) {
  return a.toLowerCase().localeCompare(b.toLowerCase())
}

// case-sensitive comparator for sorting glob results
function alphasort (a, b) {
  return a.localeCompare(b)
}
// Normalizes options.ignore onto `self.ignore` as an array of
// { matcher, gmatcher } entries (see ignoreMap).
function setupIgnores (self, options) {
  var ignore = options.ignore || []
  if (!Array.isArray(ignore))
    ignore = [ignore]
  self.ignore = ignore.length ? ignore.map(ignoreMap) : ignore
}
// ignore patterns are always in dot:true mode.
// Builds the matcher pair for one ignore pattern; patterns ending in `/**`
// additionally get a `gmatcher` that also ignores the directory itself.
function ignoreMap (pattern) {
  var gmatcher = null
  if (pattern.endsWith('/**')) {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
// Copies and normalizes user-supplied glob options onto `self` and derives
// cwd/root/cwdAbs plus the compiled Minimatch instance. Mutates both `self`
// and `options` (nonegate/nocomment are forced on).
function setopts ( self , pattern , options ) {
if ( ! options )
options = { }
// base-matching: just use globstar for that.
if ( options . matchBase && - 1 === pattern . indexOf ( "/" ) ) {
if ( options . noglobstar ) {
throw new Error ( "base matching requires globstar" )
}
pattern = "**/" + pattern
}
// boolean flags are coerced with !!; caches default to prototype-less objects
self . silent = ! ! options . silent
self . pattern = pattern
self . strict = options . strict !== false
self . realpath = ! ! options . realpath
self . realpathCache = options . realpathCache || Object . create ( null )
self . follow = ! ! options . follow
self . dot = ! ! options . dot
self . mark = ! ! options . mark
self . nodir = ! ! options . nodir
// nodir filtering relies on the trailing-slash marks, so force mark on
if ( self . nodir )
self . mark = true
self . sync = ! ! options . sync
self . nounique = ! ! options . nounique
self . nonull = ! ! options . nonull
self . nosort = ! ! options . nosort
self . nocase = ! ! options . nocase
self . stat = ! ! options . stat
self . noprocess = ! ! options . noprocess
self . absolute = ! ! options . absolute
self . maxLength = options . maxLength || Infinity
self . cache = options . cache || Object . create ( null )
self . statCache = options . statCache || Object . create ( null )
self . symlinks = options . symlinks || Object . create ( null )
setupIgnores ( self , options )
// remember whether the caller supplied a cwd different from the process one
self . changedCwd = false
var cwd = process . cwd ( )
if ( ! ownProp ( options , "cwd" ) )
self . cwd = cwd
else {
self . cwd = path . resolve ( options . cwd )
self . changedCwd = self . cwd !== cwd
}
self . root = options . root || path . resolve ( self . cwd , "/" )
self . root = path . resolve ( self . root )
// normalize win32 separators so patterns and results use forward slashes
if ( process . platform === "win32" )
self . root = self . root . replace ( /\\/g , "/" )
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
self . cwdAbs = isAbsolute ( self . cwd ) ? self . cwd : makeAbs ( self , self . cwd )
if ( process . platform === "win32" )
self . cwdAbs = self . cwdAbs . replace ( /\\/g , "/" )
self . nomount = ! ! options . nomount
// disable comments and negation in Minimatch.
// Note that they are not supported in Glob itself anyway.
options . nonegate = true
options . nocomment = true
self . minimatch = new Minimatch ( pattern , options )
self . options = self . minimatch . options
}
function finish ( self ) {
var nou = self . nounique
var all = nou ? [ ] : Object . create ( null )
for ( var i = 0 , l = self . matches . length ; i < l ; i ++ ) {
var matches = self . matches [ i ]
if ( ! matches || Object . keys ( matches ) . length === 0 ) {
if ( self . nonull ) {
// do like the shell, and spit out the literal glob
var literal = self . minimatch . globSet [ i ]
if ( nou )
all . push ( literal )
else
all [ literal ] = true
}
} else {
// had matches
var m = Object . keys ( matches )
if ( nou )
all . push . apply ( all , m )
else
m . forEach ( function ( m ) {
all [ m ] = true
} )
}
}
if ( ! nou )
all = Object . keys ( all )
if ( ! self . nosort )
all = all . sort ( self . nocase ? alphasorti : alphasort )
// at *some* point we statted all of these
if ( self . mark ) {
for ( var i = 0 ; i < all . length ; i ++ ) {
all [ i ] = self . _mark ( all [ i ] )
}
if ( self . nodir ) {
all = all . filter ( function ( e ) {
var notDir = ! ( /\/$/ . test ( e ) )
var c = self . cache [ e ] || self . cache [ makeAbs ( self , e ) ]
if ( notDir && c )
notDir = c !== 'DIR' && ! Array . isArray ( c )
return notDir
} )
}
}
if ( self . ignore . length )
all = all . filter ( function ( m ) {
return ! isIgnored ( self , m )
} )
self . found = all
}
// Normalize the trailing slash of `p` to match what the stat cache says
// it is: directories get a trailing '/', non-directories lose theirs.
// The cache/statCache entries are copied to the corrected key.
function mark (self, p) {
  var abs = makeAbs(self, p)
  var cached = self.cache[abs]
  var marked = p

  if (cached) {
    var isDir = cached === 'DIR' || Array.isArray(cached)
    var hasSlash = p.slice(-1) === '/'

    if (isDir && !hasSlash) {
      marked += '/'
    } else if (!isDir && hasSlash) {
      marked = marked.slice(0, -1)
    }

    if (marked !== p) {
      // keep the caches consistent under the corrected key
      var markedAbs = makeAbs(self, marked)
      self.statCache[markedAbs] = self.statCache[abs]
      self.cache[markedAbs] = self.cache[abs]
    }
  }

  return marked
}
// lotta situps...
// Convert `f` to an absolute path using the instance's root/cwd rules,
// normalizing to forward slashes on Windows.
function makeAbs (self, f) {
  var abs
  if (f.charAt(0) === '/') {
    // rooted pattern: re-anchor onto self.root
    abs = path.join(self.root, f)
  } else if (isAbsolute(f) || f === '') {
    abs = f
  } else if (self.changedCwd) {
    abs = path.resolve(self.cwd, f)
  } else {
    abs = path.resolve(f)
  }

  if (process.platform === 'win32') {
    abs = abs.replace(/\\/g, '/')
  }

  return abs
}
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
function isIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    if (item.matcher.match(path))
      return true
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
// True when everything under `path` is ignored — i.e. some ignore
// pattern ended in '/**' and its gmatcher matches this directory.
function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 647 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-generated ESM-interop helpers (emitted by tsc).

// Copy property `k` of module `m` onto `o` (optionally renamed `k2`),
// via a live getter when property descriptors are supported.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));

// Attach `v` as the `default` export of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});

// Emulate `import * as ns` for CommonJS modules: copy own properties
// (except `default`) and expose the module itself as `ns.default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.setTmpDir = exports.tmpDir = exports.IsPost = void 0;
const core = __importStar(__webpack_require__(186));
// True when this run is the post phase of the action: the main phase
// saves 'isPost' below, and Actions re-exposes it as STATE_isPost.
exports.IsPost = !!process.env['STATE_isPost'];
// Temp directory recorded by the main phase (empty string when unset).
exports.tmpDir = process.env['STATE_tmpDir'] || '';
// Persist the temp dir so the post phase can find and clean it up.
function setTmpDir(tmpDir) {
    core.saveState('tmpDir', tmpDir);
}
exports.setTmpDir = setTmpDir;
// Main phase: flag that the next invocation is the post phase.
if (!exports.IsPost) {
    core.saveState('isPost', 'true');
}
//# sourceMappingURL=state-helper.js.map
/***/ } ) ,
/***/ 668 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
const VERSION = "4.5.4" ;
// Error raised when a GraphQL response contains an `errors` array.
// Carries the request options, the response payload's own properties,
// and the response headers.
class GraphqlError extends Error {
  constructor (request, response) {
    // first error's message becomes the Error message
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    Object.assign(this, { headers: response.headers });
    this.name = "GraphqlError";
    this.request = request;

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
// Option keys passed through to the transport verbatim; everything else
// becomes a GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];

// Send a GraphQL query via `request`.  Accepts either (query, options)
// or a single options object that already contains `query`.  Resolves
// with response.data.data, or rejects with GraphqlError when the
// response carries an `errors` array.
function graphql(request, query, options) {
  options = typeof query === "string" ? Object.assign({ query }, options) : query;

  const requestOptions = {};
  for (const key of Object.keys(options)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = options[key];
    } else {
      if (!requestOptions.variables) {
        requestOptions.variables = {};
      }
      requestOptions.variables[key] = options[key];
    }
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      // copy headers into a plain object for the error
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlError(requestOptions, {
        headers,
        data: response.data
      });
    }
    return response.data.data;
  });
}
function withDefaults ( request$1 , newDefaults ) {
const newRequest = request$1 . defaults ( newDefaults ) ;
const newApi = ( query , options ) => {
return graphql ( newRequest , query , options ) ;
} ;
return Object . assign ( newApi , {
defaults : withDefaults . bind ( null , newRequest ) ,
endpoint : request . request . endpoint
} ) ;
}
const graphql$1 = withDefaults ( request . request , {
headers : {
"user-agent" : ` octokit-graphql.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
} ,
method : "POST" ,
url : "/graphql"
} ) ;
function withCustomRequest ( customRequest ) {
return withDefaults ( customRequest , {
method : "POST" ,
url : "/graphql"
} ) ;
}
exports . graphql = graphql$1 ;
exports . withCustomRequest = withCustomRequest ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 670 :
/***/ ( function ( module ) {
module . exports = register
// Run `method(options)` through the hooks registered under `name`.
// `name` may be an array of hook names, which are composed so the first
// name is the outermost wrapper.  Returns a Promise of the result
// (throws synchronously on an invalid `method`).
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  if (!options) {
    options = {}
  }

  if (Array.isArray(name)) {
    // Compose right-to-left WITHOUT mutating the caller's array.
    // (The previous implementation called name.reverse(), reversing the
    // caller-owned array in place as a side effect.)
    return name.reduceRight(function (callback, name) {
      return register.bind(null, state, name, callback, options)
    }, method)()
  }

  return Promise.resolve()
    .then(function () {
      if (!state.registry[name]) {
        return method(options)
      }

      // wrap `method` with every registered hook, innermost first
      return (state.registry[name]).reduce(function (method, registered) {
        return registered.hook.bind(null, method, options)
      }, method)()
    })
}
/***/ } ) ,
/***/ 682 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var register = _ _webpack _require _ _ ( 670 )
var addHook = _ _webpack _require _ _ ( 549 )
var removeHook = _ _webpack _require _ _ ( 819 )
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind
var bindable = bind.bind(bind)

// Attach the hook API to `hook`: `remove` plus one registration method
// per kind (before/error/after/wrap), pre-bound to `state` (and to
// `name` for singular hooks).
// NOTE: stray VCS timestamp lines that had been pasted into this
// function body were removed — they were not valid JavaScript.
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}
// Create a single-purpose hook: one implicit hook name ('h'), its own
// registry, and the full before/error/after/wrap API bound onto it.
function HookSingular () {
  var singularHookName = 'h'
  var singularHookState = {
    registry: {}
  }
  var singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}
// Create a hook collection: named hooks share one registry, and the
// returned function is `register` bound to that state.
function HookCollection () {
  var state = {
    registry: {}
  }

  var hook = register.bind(null, state)
  bindApi(hook, state)

  return hook
}
var collectionHookDeprecationMessageDisplayed = false

// Deprecated constructor kept for backwards compatibility: behaves like
// Hook.Collection() but warns once per process.
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}

Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
/***/ } ) ,
2020-09-02 10:07:11 +02:00
2020-10-23 18:21:44 +02:00
/***/ 688 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = __webpack_require__(88)

// Return the major version component of `a` (version string or SemVer).
// Throws (via the SemVer constructor) when `a` is not a valid version.
const major = (a, loose) => new SemVer(a, loose).major
module.exports = major
/***/ } ) ,
/***/ 701 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compareBuild = __webpack_require__(156)
// Sort a list of versions highest-first, honoring build metadata.
// NOTE: sorts `list` in place (Array.prototype.sort) and returns it.
const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose))
module.exports = rsort
/***/ } ) ,
/***/ 706 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = __webpack_require__(828)

// Mostly just for testing and legacy API reasons
// Flatten a Range into arrays of comparator strings, one inner array
// per OR-ed subrange.
const toComparators = (range, options) =>
  new Range(range, options).set
    .map(comp => comp.map(c => c.value).join(' ').trim().split(' '))

module.exports = toComparators
/***/ } ) ,
/***/ 714 :
/***/ ( function ( module ) {
"use strict" ;
// A POSIX path is absolute exactly when it begins with a forward slash.
function posix (path) {
  return path.startsWith('/');
}
// Windows absoluteness: a drive letter or UNC device followed by a
// separator, or a bare leading separator.
function win32 (path) {
  // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
  var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
  var parts = splitDeviceRe.exec(path);
  var device = parts[1] || '';
  // UNC paths (\\server\share) are always absolute.
  var isUnc = Boolean(device && device.charAt(1) !== ':');
  return Boolean(parts[2] || isUnc);
}
module . exports = process . platform === 'win32' ? win32 : posix ;
module . exports . posix = posix ;
module . exports . win32 = win32 ;
/***/ } ) ,
/***/ 717 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var concatMap = _ _webpack _require _ _ ( 891 ) ;
var balanced = _ _webpack _require _ _ ( 760 ) ;
module . exports = expandTop ;
// Sentinel strings used to hide backslash-escaped metacharacters from
// the expansion logic.  Randomized so user input cannot collide.
var escSlash = '\0SLASH' + Math.random() + '\0';
var escOpen = '\0OPEN' + Math.random() + '\0';
var escClose = '\0CLOSE' + Math.random() + '\0';
var escComma = '\0COMMA' + Math.random() + '\0';
var escPeriod = '\0PERIOD' + Math.random() + '\0';

// Interpret a sequence: integers parse as numbers, anything else
// falls back to its first character code (for alpha ranges).
function numeric (str) {
  var asInt = parseInt(str, 10);
  // loose == is intentional: compares the parsed number to the string
  return asInt == str ? asInt : str.charCodeAt(0);
}

// Replace escaped metacharacters with sentinels.  '\\\\' must come
// first so a literal backslash never swallows a following escape.
function escapeBraces (str) {
  var out = str;
  [['\\\\', escSlash], ['\\{', escOpen], ['\\}', escClose], ['\\,', escComma], ['\\.', escPeriod]]
    .forEach(function (pair) {
      out = out.split(pair[0]).join(pair[1]);
    });
  return out;
}

// Inverse of escapeBraces: restore the literal characters.
function unescapeBraces (str) {
  var out = str;
  [[escSlash, '\\'], [escOpen, '{'], [escClose, '}'], [escComma, ','], [escPeriod, '.']]
    .forEach(function (pair) {
      out = out.split(pair[0]).join(pair[1]);
    });
  return out;
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
// Small helpers used by expand() below.

const identity = (value) => value;

// Wrap a string back into braces: 'a' -> '{a}'.
const embrace = (str) => '{' + str + '}';

// True for zero-padded numerals like '01' or '-07'.
const isPadded = (el) => /^-?0\d/.test(el);

// Comparators selected per sequence direction (ascending / descending).
const lte = (i, y) => i <= y;
const gte = (i, y) => i >= y;
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 734 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var pathModule = _ _webpack _require _ _ ( 622 ) ;
var isWindows = process . platform === 'win32' ;
var fs = _ _webpack _require _ _ ( 747 ) ;
// JavaScript implementation of realpath, ported from node pre-v6
var DEBUG = process . env . NODE _DEBUG && /fs/ . test ( process . env . NODE _DEBUG ) ;
function rethrow ( ) {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate.
var callback ;
if ( DEBUG ) {
var backtrace = new Error ;
callback = debugCallback ;
} else
callback = missingCallback ;
return callback ;
function debugCallback ( err ) {
if ( err ) {
backtrace . message = err . message ;
err = backtrace ;
missingCallback ( err ) ;
}
}
function missingCallback ( err ) {
if ( err ) {
if ( process . throwDeprecation )
throw err ; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
else if ( ! process . noDeprecation ) {
var msg = 'fs: missing callback ' + ( err . stack || err . message ) ;
if ( process . traceDeprecation )
console . trace ( msg ) ;
else
console . error ( msg ) ;
}
}
}
}
// Pass real callbacks through unchanged; substitute the deprecation
// shim from rethrow() for anything else.
function maybeCallback (cb) {
  if (typeof cb === 'function') return cb;
  return rethrow();
}
var normalize = pathModule . normalize ;
// Regexp that finds the next partion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
if ( isWindows ) {
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g ;
} else {
var nextPartRe = /(.*?)(?:[\/]+|$)/g ;
}
// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
if ( isWindows ) {
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/ ;
} else {
var splitRootRe = /^[\/]*/ ;
}
exports . realpathSync = function realpathSync ( p , cache ) {
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return cache [ p ] ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstatSync ( base ) ;
knownHard [ base ] = true ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
// NB: p.length changes.
while ( pos < p . length ) {
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
continue ;
}
var resolvedLink ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// some known symbolic link. no need to stat again.
resolvedLink = cache [ base ] ;
} else {
var stat = fs . lstatSync ( base ) ;
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
continue ;
}
// read the link if it wasn't read before
// dev/ino always return 0 on windows, so skip the check.
var linkTarget = null ;
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
linkTarget = seenLinks [ id ] ;
}
}
if ( linkTarget === null ) {
fs . statSync ( base ) ;
linkTarget = fs . readlinkSync ( base ) ;
}
resolvedLink = pathModule . resolve ( previous , linkTarget ) ;
// track this, if given a cache.
if ( cache ) cache [ base ] = resolvedLink ;
if ( ! isWindows ) seenLinks [ id ] = linkTarget ;
}
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
if ( cache ) cache [ original ] = p ;
return p ;
} ;
exports . realpath = function realpath ( p , cache , cb ) {
if ( typeof cb !== 'function' ) {
cb = maybeCallback ( cache ) ;
cache = null ;
}
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return process . nextTick ( cb . bind ( null , null , cache [ p ] ) ) ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
knownHard [ base ] = true ;
LOOP ( ) ;
} ) ;
} else {
process . nextTick ( LOOP ) ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
function LOOP ( ) {
// stop if scanned past end of path
if ( pos >= p . length ) {
if ( cache ) cache [ original ] = p ;
return cb ( null , p ) ;
}
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
return process . nextTick ( LOOP ) ;
}
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// known symbolic link. no need to stat again.
return gotResolvedLink ( cache [ base ] ) ;
}
return fs . lstat ( base , gotStat ) ;
}
function gotStat ( err , stat ) {
if ( err ) return cb ( err ) ;
// if not a symlink, skip to the next path part
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
return process . nextTick ( LOOP ) ;
}
// stat & read the link if not read before
// call gotTarget as soon as the link target is known
// dev/ino always return 0 on windows, so skip the check.
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
return gotTarget ( null , seenLinks [ id ] , base ) ;
}
}
fs . stat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
fs . readlink ( base , function ( err , target ) {
if ( ! isWindows ) seenLinks [ id ] = target ;
gotTarget ( err , target ) ;
} ) ;
} ) ;
}
function gotTarget ( err , target , base ) {
if ( err ) return cb ( err ) ;
var resolvedLink = pathModule . resolve ( previous , target ) ;
if ( cache ) cache [ base ] = resolvedLink ;
gotResolvedLink ( resolvedLink ) ;
}
function gotResolvedLink ( resolvedLink ) {
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
} ;
/***/ } ) ,
/***/ 741 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = __webpack_require__(828)

// Return the normalized range string when `range` parses, else null.
const validRange = (range, options) => {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return new Range(range, options).range || '*'
  } catch (er) {
    return null
  }
}
module.exports = validRange
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
2020-10-20 15:18:02 +02:00
/***/ } ) ,
/***/ 750 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = __webpack_require__(830)

// Synchronous CSV parse: runs `data` through the streaming Parser and
// collects records into an array — or, when options.objname is set,
// into an object keyed by each record's first field.
module.exports = function (data, options = {}) {
  if (typeof data === 'string') {
    data = Buffer.from(data)
  }
  const records = options && options.objname ? {} : []
  const parser = new parse.Parser(options)
  // Intercept the parser's stream push to collect records directly.
  parser.push = function (record) {
    if (record === null) {
      // null signals end-of-stream; nothing to collect
      return
    }
    if (options.objname === undefined)
      records.push(record)
    else {
      records[record[0]] = record[1]
    }
  }
  // First pass parses the payload; second pass flushes parser state.
  const err1 = parser.__parse(data, false)
  if (err1 !== undefined) throw err1
  const err2 = parser.__parse(undefined, true)
  if (err2 !== undefined) throw err2
  return records
}
2020-09-02 10:07:11 +02:00
/***/ } ) ,
/***/ 757 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-generated interop/async helpers (emitted by tsc).

// Copy property `k` of module `m` onto `o` (optionally renamed `k2`).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));

// Attach `v` as the `default` export of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});

// Emulate `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};

// Emulate async/await on top of a generator: the generator yields
// promises and `step` resumes it with each resolved value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.exec = void 0;
const aexec = __importStar(__webpack_require__(514));
// Run `command` via @actions/exec, capturing stdout/stderr instead of
// failing on a non-zero exit; resolves { success, stdout, stderr }.
exports.exec = (command, args = [], silent) => __awaiter(void 0, void 0, void 0, function* () {
    let stdout = '';
    let stderr = '';
    const options = {
        silent: silent,
        ignoreReturnCode: true   // caller inspects `success` instead
    };
    options.listeners = {
        stdout: (data) => {
            stdout += data.toString();
        },
        stderr: (data) => {
            stderr += data.toString();
        }
    };
    const returnCode = yield aexec.exec(command, args, options);
    return {
        success: returnCode === 0,
        stdout: stdout.trim(),
        stderr: stderr.trim()
    };
});
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 760 :
/***/ ( function ( module ) {
"use strict" ;
module.exports = balanced;

// Locate the first balanced `a`...`b` pair in `str` and split it into
// pre / body / post segments.  `a` and `b` may be strings or RegExps
// (a RegExp is first reduced to its first match in `str`).
function balanced (a, b, str) {
  if (a instanceof RegExp) a = maybeMatch(a, str);
  if (b instanceof RegExp) b = maybeMatch(b, str);

  var r = range(a, b, str);
  if (!r) return r;

  return {
    start: r[0],
    end: r[1],
    pre: str.slice(0, r[0]),
    body: str.slice(r[0] + a.length, r[1]),
    post: str.slice(r[1] + b.length)
  };
}
// First match of `reg` in `str`, or null when there is none.
function maybeMatch (reg, str) {
  var match = str.match(reg);
  return match === null ? null : match[0];
}
balanced . range = range ;
function range ( a , b , str ) {
var begs , beg , left , right , result ;
var ai = str . indexOf ( a ) ;
var bi = str . indexOf ( b , ai + 1 ) ;
var i = ai ;
if ( ai >= 0 && bi > 0 ) {
begs = [ ] ;
left = str . length ;
while ( i >= 0 && ! result ) {
if ( i == ai ) {
begs . push ( i ) ;
ai = str . indexOf ( a , i + 1 ) ;
} else if ( begs . length == 1 ) {
result = [ begs . pop ( ) , bi ] ;
} else {
beg = begs . pop ( ) ;
if ( beg < left ) {
left = beg ;
right = bi ;
}
bi = str . indexOf ( b , i + 1 ) ;
}
i = ai < bi && ai >= 0 ? ai : bi ;
}
if ( begs . length ) {
result = [ left , right ] ;
}
}
return result ;
}
/***/ } ) ,
/***/ 761 :
/***/ ( function ( module ) {
module . exports = require ( "zlib" ) ;
/***/ } ) ,
/***/ 762 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
var beforeAfterHook = _ _webpack _require _ _ ( 682 ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var graphql = _ _webpack _require _ _ ( 668 ) ;
var authToken = _ _webpack _require _ _ ( 334 ) ;
// Babel helper: assign `value` to obj[key], going through
// Object.defineProperty for pre-existing keys so non-writable or
// inherited properties are overridden cleanly.  Returns `obj`.
function _defineProperty (obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value,
      enumerable: true,
      configurable: true,
      writable: true
    });
    return obj;
  }

  obj[key] = value;
  return obj;
}
// Babel helper: own string keys of `object`, followed by its symbol
// keys (only enumerable symbols when `enumerableOnly` is set).
function ownKeys (object, enumerableOnly) {
  var keys = Object.keys(object);
  if (!Object.getOwnPropertySymbols) return keys;

  var symbols = Object.getOwnPropertySymbols(object);
  if (enumerableOnly) {
    symbols = symbols.filter(function (sym) {
      return Object.getOwnPropertyDescriptor(object, sym).enumerable;
    });
  }
  return keys.concat(symbols);
}
function _objectSpread2 ( target ) {
for ( var i = 1 ; i < arguments . length ; i ++ ) {
var source = arguments [ i ] != null ? arguments [ i ] : { } ;
if ( i % 2 ) {
ownKeys ( Object ( source ) , true ) . forEach ( function ( key ) {
_defineProperty ( target , key , source [ key ] ) ;
} ) ;
} else if ( Object . getOwnPropertyDescriptors ) {
Object . defineProperties ( target , Object . getOwnPropertyDescriptors ( source ) ) ;
} else {
ownKeys ( Object ( source ) ) . forEach ( function ( key ) {
Object . defineProperty ( target , key , Object . getOwnPropertyDescriptor ( source , key ) ) ;
} ) ;
}
}
return target ;
}
const VERSION = "3.1.2" ;
/**
 * Core Octokit client (@octokit/core v3.1.2). Wires together request
 * defaults, GraphQL, logging, authentication, and registered plugins.
 */
class Octokit {
  /**
   * @param {object} [options] - supports `baseUrl`, `userAgent`, `previews`,
   *   `timeZone`, `request`, `log`, `auth` and `authStrategy` (see the
   *   numbered auth cases below).
   */
  constructor(options = {}) {
    const hook = new beforeAfterHook.Collection();
    const requestDefaults = {
      baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        // Every request is routed through the "request" hook channel.
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    }; // prepend default user agent with `options.userAgent` if set

    requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }

    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }

    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }

    this.request = request.request.defaults(requestDefaults);
    // GraphQL shares the REST defaults; GitHub Enterprise serves GraphQL
    // under /api instead of /api/v3, hence the baseUrl rewrite.
    this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, {
      baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api")
    }));
    // Default logger: debug/info are no-ops; warn/error go to the console.
    this.log = Object.assign({
      debug: () => {},
      info: () => {},
      warn: console.warn.bind(console),
      error: console.error.bind(console)
    }, options.log);
    this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
    // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
    // (2) If only `options.auth` is set, use the default token authentication strategy.
    // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
    // TODO: type `options.auth` based on `options.authStrategy`.

    if (!options.authStrategy) {
      if (!options.auth) {
        // (1)
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        // (2)
        const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯

        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      // (3)
      const auth = options.authStrategy(Object.assign({
        request: this.request
      }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯

      hook.wrap("request", auth.hook);
      this.auth = auth;
    } // apply plugins
    // https://stackoverflow.com/a/16345172

    const classConstructor = this.constructor;
    classConstructor.plugins.forEach(plugin => {
      Object.assign(this, plugin(this, options));
    });
  }

  /**
   * Create a subclass whose constructor merges `defaults` into the options
   * of every instance. `defaults` may also be a function mapping the
   * instance options to the effective options.
   */
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};

        if (typeof defaults === "function") {
          super(defaults(options));
          return;
        }

        super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
          // Both user agents present: instance value first, then default.
          userAgent: `${options.userAgent} ${defaults.userAgent}`
        } : null));
      }

    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */


  static plugin(...newPlugins) {
    var _a;

    const currentPlugins = this.plugins;
    // Returns a subclass carrying the deduplicated union of the current
    // plugins and the newly supplied ones.
    const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
    return NewOctokit;
  }

}
Octokit . VERSION = VERSION ;
Octokit . plugins = [ ] ;
exports . Octokit = Octokit ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 804:
/***/ (function(module, __unusedexports, __webpack_require__) {

// semver: compareLoose(a, b) — compare two versions with loose parsing enabled.
const compare = __webpack_require__(309)

const compareLoose = (a, b) => compare(a, b, true)
module.exports = compareLoose

/***/ }),
/***/ 818:
/***/ (function(module) {

// Bundle entry for Node's built-in `tls` module (externalized by webpack).
module.exports = require("tls");

/***/ }),
/***/ 819 :
/***/ ( function ( module ) {
module . exports = removeHook
// Unregister `method` from the named hook list in `state.registry`.
// No-op when the hook name is unknown or the method was never registered.
function removeHook(state, name, method) {
  const hooks = state.registry[name]
  if (!hooks) {
    return
  }

  const position = hooks.findIndex(function (registered) {
    return registered.orig === method
  })

  if (position !== -1) {
    hooks.splice(position, 1)
  }
}
/***/ } ) ,
/***/ 828 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// hoisted class for cyclic dependency (Comparator requires this module back)
class Range {
  /**
   * Parse a semver range expression into a normalized comparator set.
   * `this.set` is a 2-d array: OR-separated alternatives, each an AND-list
   * of Comparator instances.
   *
   * NOTE(review): the original block contained stray VCS timestamp lines
   * (merge/extraction residue) inside method bodies; they were syntax
   * errors and have been removed. No other logic changed.
   */
  constructor (range, options) {
    // Normalize legacy boolean `options` (loose flag) into an object.
    if (!options || typeof options !== 'object') {
      options = {
        loose: !!options,
        includePrerelease: false
      }
    }

    if (range instanceof Range) {
      if (
        range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease
      ) {
        // Same flags: ranges are immutable enough to reuse as-is.
        return range
      } else {
        return new Range(range.raw, options)
      }
    }

    if (range instanceof Comparator) {
      // just put it in the set and return
      this.raw = range.value
      this.set = [[range]]
      this.format()
      return this
    }

    this.options = options
    this.loose = !!options.loose
    this.includePrerelease = !!options.includePrerelease

    // First, split based on boolean or ||
    this.raw = range
    this.set = range
      .split(/\s*\|\|\s*/)
      // map the range to a 2d array of comparators
      .map(range => this.parseRange(range.trim()))
      // throw out any comparator lists that are empty
      // this generally means that it was not a valid range, which is allowed
      // in loose mode, but will still throw if the WHOLE range is invalid.
      .filter(c => c.length)

    if (!this.set.length) {
      throw new TypeError(`Invalid SemVer Range: ${range}`)
    }

    this.format()
  }

  // Rebuild `this.range`, the canonical string form, from `this.set`.
  format () {
    this.range = this.set
      .map((comps) => {
        return comps.join(' ').trim()
      })
      .join('||')
      .trim()
    return this.range
  }

  toString () {
    return this.range
  }

  // Desugar one OR-free range chunk into a list of Comparator instances.
  parseRange (range) {
    const loose = this.options.loose
    range = range.trim()
    // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
    const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
    range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
    debug('hyphen replace', range)
    // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
    range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
    debug('comparator trim', range, re[t.COMPARATORTRIM])

    // `~ 1.2.3` => `~1.2.3`
    range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

    // `^ 1.2.3` => `^1.2.3`
    range = range.replace(re[t.CARETTRIM], caretTrimReplace)

    // normalize spaces
    range = range.split(/\s+/).join(' ')

    // At this point, the range is completely trimmed and
    // ready to be split into comparators.
    const compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
    return range
      .split(' ')
      .map(comp => parseComparator(comp, this.options))
      .join(' ')
      .split(/\s+/)
      .map(comp => replaceGTE0(comp, this.options))
      // in loose mode, throw out any that are not valid comparators
      .filter(this.options.loose ? comp => !!comp.match(compRe) : () => true)
      .map(comp => new Comparator(comp, this.options))
  }

  // True when some alternative of this range and some alternative of `range`
  // can both be satisfied and every comparator pair overlaps.
  intersects (range, options) {
    if (!(range instanceof Range)) {
      throw new TypeError('a Range is required')
    }

    return this.set.some((thisComparators) => {
      return (
        isSatisfiable(thisComparators, options) &&
        range.set.some((rangeComparators) => {
          return (
            isSatisfiable(rangeComparators, options) &&
            thisComparators.every((thisComparator) => {
              return rangeComparators.every((rangeComparator) => {
                return thisComparator.intersects(rangeComparator, options)
              })
            })
          )
        })
      )
    })
  }

  // if ANY of the sets match ALL of its comparators, then pass
  test (version) {
    if (!version) {
      return false
    }

    if (typeof version === 'string') {
      try {
        version = new SemVer(version, this.options)
      } catch (er) {
        // An unparseable version string never satisfies a range.
        return false
      }
    }

    for (let i = 0; i < this.set.length; i++) {
      if (testSet(this.set[i], version, this.options)) {
        return true
      }
    }

    return false
  }
}
module . exports = Range
const Comparator = _ _webpack _require _ _ ( 532 )
const debug = _ _webpack _require _ _ ( 427 )
const SemVer = _ _webpack _require _ _ ( 88 )
const {
re ,
t ,
comparatorTrimReplace ,
tildeTrimReplace ,
caretTrimReplace
} = _ _webpack _require _ _ ( 523 )
// take a set of comparators and determine whether there
// exists a version which can satisfy it
const isSatisfiable = (comparators, options) => {
  let satisfiable = true
  const pending = comparators.slice()
  let current = pending.pop()

  // Repeatedly check the most recently popped comparator against all the
  // ones still pending; stop as soon as a conflict is found.
  while (satisfiable && pending.length) {
    satisfiable = pending.every((otherComparator) => {
      return current.intersects(otherComparator, options)
    })
    current = pending.pop()
  }

  return satisfiable
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
const parseComparator = ( comp , options ) => {
debug ( 'comp' , comp , options )
comp = replaceCarets ( comp , options )
debug ( 'caret' , comp )
comp = replaceTildes ( comp , options )
debug ( 'tildes' , comp )
comp = replaceXRanges ( comp , options )
debug ( 'xrange' , comp )
comp = replaceStars ( comp , options )
debug ( 'stars' , comp )
return comp
}
const isX = id => ! id || id . toLowerCase ( ) === 'x' || id === '*'
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
const replaceTildes = (comp, options) => {
  // Desugar each whitespace-separated comparator independently.
  const parts = comp.trim().split(/\s+/)
  return parts.map((part) => replaceTilde(part, options)).join(' ')
}
// Desugar a single `~`/`~>` comparator into explicit `>=`/`<` bounds.
// NOTE(review): stray VCS timestamp lines inside this body (merge residue,
// syntax errors) were removed; logic is otherwise unchanged.
const replaceTilde = (comp, options) => {
  const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
  return comp.replace(r, (_, M, m, p, pr) => {
    debug('tilde', comp, _, M, m, p, pr)
    let ret

    if (isX(M)) {
      // `~` / `~>` alone matches anything.
      ret = ''
    } else if (isX(m)) {
      ret = `>=${M}.0.0 <${+M + 1}.0.0-0`
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0-0
      ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`
    } else if (pr) {
      debug('replaceTilde pr', pr)
      ret = `>=${M}.${m}.${p}-${pr} <${M}.${+m + 1}.0-0`
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0-0
      ret = `>=${M}.${m}.${p} <${M}.${+m + 1}.0-0`
    }

    debug('tilde return', ret)
    return ret
  })
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
// ^1.2.3 --> >=1.2.3 <2.0.0-0
// ^1.2.0 --> >=1.2.0 <2.0.0-0
const replaceCarets = (comp, options) => {
  // Desugar each whitespace-separated comparator independently.
  const parts = comp.trim().split(/\s+/)
  return parts.map((part) => replaceCaret(part, options)).join(' ')
}
// Desugar a single `^` comparator into explicit `>=`/`<` bounds. A caret
// pins the left-most non-zero component (major, or minor/patch for 0.x).
// NOTE(review): stray VCS timestamp lines inside this body (merge residue,
// syntax errors) were removed; logic is otherwise unchanged.
const replaceCaret = (comp, options) => {
  debug('caret', comp, options)
  const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
  // With includePrerelease, lower bounds must admit prereleases (-0).
  const z = options.includePrerelease ? '-0' : ''
  return comp.replace(r, (_, M, m, p, pr) => {
    debug('caret', comp, _, M, m, p, pr)
    let ret

    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`
    } else if (isX(p)) {
      if (M === '0') {
        ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`
      } else {
        ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          // ^0.0.p-pr only allows later prereleases of the same patch.
          ret = `>=${M}.${m}.${p}-${pr} <${M}.${m}.${+p + 1}-0`
        } else {
          ret = `>=${M}.${m}.${p}-${pr} <${M}.${+m + 1}.0-0`
        }
      } else {
        ret = `>=${M}.${m}.${p}-${pr} <${+M + 1}.0.0-0`
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          ret = `>=${M}.${m}.${p}${z} <${M}.${m}.${+p + 1}-0`
        } else {
          ret = `>=${M}.${m}.${p}${z} <${M}.${+m + 1}.0-0`
        }
      } else {
        ret = `>=${M}.${m}.${p} <${+M + 1}.0.0-0`
      }
    }

    debug('caret return', ret)
    return ret
  })
}
// Apply replaceXRange to every whitespace-separated comparator in `comp`.
const replaceXRanges = (comp, options) => {
  debug('replaceXRanges', comp, options)
  const parts = comp.split(/\s+/)
  return parts.map((part) => replaceXRange(part, options)).join(' ')
}
// Desugar x-ranges (`1.x`, `>=1.2.x`, `1.*`, ...) into concrete comparators.
// NOTE(review): stray VCS timestamp lines inside this body (merge residue,
// syntax errors) were removed; logic is otherwise unchanged.
const replaceXRange = (comp, options) => {
  comp = comp.trim()
  const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
  return comp.replace(r, (ret, gtlt, M, m, p, pr) => {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    const xM = isX(M)
    const xm = xM || isX(m)
    const xp = xm || isX(p)
    const anyX = xp

    // `=1.x` is the same as `1.x`.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    // if we're including prereleases in the match, then we need
    // to fix this to -0, the lowest possible prerelease value
    pr = options.includePrerelease ? '-0' : ''

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0-0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      // Exclusive upper bounds must exclude all prereleases too.
      if (gtlt === '<') {
        pr = '-0'
      }

      ret = `${gtlt + M}.${m}.${p}${pr}`
    } else if (xm) {
      ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`
    } else if (xp) {
      ret = `>=${M}.${m}.0${pr} <${M}.${+m + 1}.0-0`
    }

    debug('xRange return', ret)

    return ret
  })
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
const replaceStars = (comp, options) => {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  const trimmed = comp.trim()
  return trimmed.replace(re[t.STAR], '')
}
// Strip a redundant `>=0.0.0`-style prefix comparator; the regex variant
// (GTE0PRE vs GTE0) is chosen by the includePrerelease option.
const replaceGTE0 = (comp, options) => {
  debug('replaceGTE0', comp, options)
  const pattern = options.includePrerelease ? t.GTE0PRE : t.GTE0
  return comp.trim().replace(re[pattern], '')
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
// NOTE(review): stray VCS timestamp lines inside this body (merge residue,
// syntax errors) were removed; logic is otherwise unchanged.
const hyphenReplace = incPr => ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) => {
  // Lower bound: missing parts default to zero.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = `>=${fM}.0.0${incPr ? '-0' : ''}`
  } else if (isX(fp)) {
    from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}`
  } else if (fpr) {
    from = `>=${from}`
  } else {
    from = `>=${from}${incPr ? '-0' : ''}`
  }

  // Upper bound: an x-ranged end is exclusive of the next release.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = `<${+tM + 1}.0.0-0`
  } else if (isX(tp)) {
    to = `<${tM}.${+tm + 1}.0-0`
  } else if (tpr) {
    to = `<=${tM}.${tm}.${tp}-${tpr}`
  } else if (incPr) {
    to = `<${tM}.${tm}.${+tp + 1}-0`
  } else {
    to = `<=${to}`
  }

  return (`${from} ${to}`).trim()
}
// True when `version` satisfies every comparator in `set`, applying the
// semver rule that a prerelease version only matches when some comparator
// explicitly names a prerelease of the same [major, minor, patch] tuple.
// NOTE(review): stray VCS timestamp lines inside this body (merge residue,
// syntax errors) were removed; logic is otherwise unchanged.
const testSet = (set, version, options) => {
  for (let i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (let i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === Comparator.ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        const allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
/***/ } ) ,
/***/ 830 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
/ *
CSV Parse
Please look at the [ project documentation ] ( https : //csv.js.org/parse/) for
additional information .
* /
const { Transform } = _ _webpack _require _ _ ( 413 )
const ResizeableBuffer = _ _webpack _require _ _ ( 942 )
const tab = 9
const nl = 10
const np = 12
const cr = 13
const space = 32
const bom _utf8 = Buffer . from ( [ 239 , 187 , 191 ] )
class Parser extends Transform {
constructor ( opts = { } ) {
super ( { ... { readableObjectMode : true } , ... opts } )
const options = { }
// Merge with user options
for ( let opt in opts ) {
options [ underscore ( opt ) ] = opts [ opt ]
}
// Normalize option `bom`
if ( options . bom === undefined || options . bom === null || options . bom === false ) {
options . bom = false
} else if ( options . bom !== true ) {
throw new CsvError ( 'CSV_INVALID_OPTION_BOM' , [
'Invalid option bom:' , 'bom must be true,' ,
` got ${ JSON . stringify ( options . bom ) } `
] )
}
// Normalize option `cast`
let fnCastField = null
if ( options . cast === undefined || options . cast === null || options . cast === false || options . cast === '' ) {
options . cast = undefined
} else if ( typeof options . cast === 'function' ) {
fnCastField = options . cast
options . cast = true
} else if ( options . cast !== true ) {
throw new CsvError ( 'CSV_INVALID_OPTION_CAST' , [
'Invalid option cast:' , 'cast must be true or a function,' ,
` got ${ JSON . stringify ( options . cast ) } `
] )
}
// Normalize option `cast_date`
if ( options . cast _date === undefined || options . cast _date === null || options . cast _date === false || options . cast _date === '' ) {
options . cast _date = false
} else if ( options . cast _date === true ) {
options . cast _date = function ( value ) {
const date = Date . parse ( value )
return ! isNaN ( date ) ? new Date ( date ) : value
}
} else if ( typeof options . cast _date !== 'function' ) {
throw new CsvError ( 'CSV_INVALID_OPTION_CAST_DATE' , [
'Invalid option cast_date:' , 'cast_date must be true or a function,' ,
` got ${ JSON . stringify ( options . cast _date ) } `
] )
}
// Normalize option `columns`
let fnFirstLineToHeaders = null
if ( options . columns === true ) {
// Fields in the first line are converted as-is to columns
fnFirstLineToHeaders = undefined
} else if ( typeof options . columns === 'function' ) {
fnFirstLineToHeaders = options . columns
options . columns = true
} else if ( Array . isArray ( options . columns ) ) {
options . columns = normalizeColumnsArray ( options . columns )
} else if ( options . columns === undefined || options . columns === null || options . columns === false ) {
options . columns = false
} else {
throw new CsvError ( 'CSV_INVALID_OPTION_COLUMNS' , [
'Invalid option columns:' ,
'expect an object, a function or true,' ,
` got ${ JSON . stringify ( options . columns ) } `
] )
}
// Normalize option `columns_duplicates_to_array`
if ( options . columns _duplicates _to _array === undefined || options . columns _duplicates _to _array === null || options . columns _duplicates _to _array === false ) {
options . columns _duplicates _to _array = false
} else if ( options . columns _duplicates _to _array !== true ) {
throw new CsvError ( 'CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY' , [
'Invalid option columns_duplicates_to_array:' ,
'expect an boolean,' ,
` got ${ JSON . stringify ( options . columns _duplicates _to _array ) } `
] )
}
// Normalize option `comment`
if ( options . comment === undefined || options . comment === null || options . comment === false || options . comment === '' ) {
options . comment = null
} else {
if ( typeof options . comment === 'string' ) {
options . comment = Buffer . from ( options . comment )
}
if ( ! Buffer . isBuffer ( options . comment ) ) {
throw new CsvError ( 'CSV_INVALID_OPTION_COMMENT' , [
'Invalid option comment:' ,
'comment must be a buffer or a string,' ,
` got ${ JSON . stringify ( options . comment ) } `
] )
}
}
// Normalize option `delimiter`
const delimiter _json = JSON . stringify ( options . delimiter )
if ( ! Array . isArray ( options . delimiter ) ) options . delimiter = [ options . delimiter ]
if ( options . delimiter . length === 0 ) {
throw new CsvError ( 'CSV_INVALID_OPTION_DELIMITER' , [
'Invalid option delimiter:' ,
'delimiter must be a non empty string or buffer or array of string|buffer,' ,
` got ${ delimiter _json } `
] )
}
options . delimiter = options . delimiter . map ( function ( delimiter ) {
if ( delimiter === undefined || delimiter === null || delimiter === false ) {
return Buffer . from ( ',' )
}
if ( typeof delimiter === 'string' ) {
delimiter = Buffer . from ( delimiter )
}
if ( ! Buffer . isBuffer ( delimiter ) || delimiter . length === 0 ) {
throw new CsvError ( 'CSV_INVALID_OPTION_DELIMITER' , [
'Invalid option delimiter:' ,
'delimiter must be a non empty string or buffer or array of string|buffer,' ,
` got ${ delimiter _json } `
] )
}
return delimiter
} )
// Normalize option `escape`
if ( options . escape === undefined || options . escape === true ) {
options . escape = Buffer . from ( '"' )
} else if ( typeof options . escape === 'string' ) {
options . escape = Buffer . from ( options . escape )
} else if ( options . escape === null || options . escape === false ) {
options . escape = null
}
if ( options . escape !== null ) {
if ( ! Buffer . isBuffer ( options . escape ) ) {
throw new Error ( ` Invalid Option: escape must be a buffer, a string or a boolean, got ${ JSON . stringify ( options . escape ) } ` )
} else if ( options . escape . length !== 1 ) {
throw new Error ( ` Invalid Option Length: escape must be one character, got ${ options . escape . length } ` )
} else {
options . escape = options . escape [ 0 ]
}
}
// Normalize option `from`
if ( options . from === undefined || options . from === null ) {
options . from = 1
} else {
if ( typeof options . from === 'string' && /\d+/ . test ( options . from ) ) {
options . from = parseInt ( options . from )
}
if ( Number . isInteger ( options . from ) ) {
if ( options . from < 0 ) {
throw new Error ( ` Invalid Option: from must be a positive integer, got ${ JSON . stringify ( opts . from ) } ` )
}
} else {
throw new Error ( ` Invalid Option: from must be an integer, got ${ JSON . stringify ( options . from ) } ` )
}
}
// Normalize option `from_line`
if ( options . from _line === undefined || options . from _line === null ) {
options . from _line = 1
} else {
if ( typeof options . from _line === 'string' && /\d+/ . test ( options . from _line ) ) {
options . from _line = parseInt ( options . from _line )
}
if ( Number . isInteger ( options . from _line ) ) {
if ( options . from _line <= 0 ) {
throw new Error ( ` Invalid Option: from_line must be a positive integer greater than 0, got ${ JSON . stringify ( opts . from _line ) } ` )
}
} else {
throw new Error ( ` Invalid Option: from_line must be an integer, got ${ JSON . stringify ( opts . from _line ) } ` )
}
}
// Normalize option `info`
if ( options . info === undefined || options . info === null || options . info === false ) {
options . info = false
} else if ( options . info !== true ) {
throw new Error ( ` Invalid Option: info must be true, got ${ JSON . stringify ( options . info ) } ` )
}
// Normalize option `max_record_size`
if ( options . max _record _size === undefined || options . max _record _size === null || options . max _record _size === false ) {
options . max _record _size = 0
} else if ( Number . isInteger ( options . max _record _size ) && options . max _record _size >= 0 ) {
// Great, nothing to do
} else if ( typeof options . max _record _size === 'string' && /\d+/ . test ( options . max _record _size ) ) {
options . max _record _size = parseInt ( options . max _record _size )
} else {
throw new Error ( ` Invalid Option: max_record_size must be a positive integer, got ${ JSON . stringify ( options . max _record _size ) } ` )
}
// Normalize option `objname`
if ( options . objname === undefined || options . objname === null || options . objname === false ) {
options . objname = undefined
} else if ( Buffer . isBuffer ( options . objname ) ) {
if ( options . objname . length === 0 ) {
throw new Error ( ` Invalid Option: objname must be a non empty buffer ` )
}
options . objname = options . objname . toString ( )
} else if ( typeof options . objname === 'string' ) {
if ( options . objname . length === 0 ) {
throw new Error ( ` Invalid Option: objname must be a non empty string ` )
}
// Great, nothing to do
} else {
throw new Error ( ` Invalid Option: objname must be a string or a buffer, got ${ options . objname } ` )
}
// Normalize option `on_record`
if ( options . on _record === undefined || options . on _record === null ) {
options . on _record = undefined
} else if ( typeof options . on _record !== 'function' ) {
throw new CsvError ( 'CSV_INVALID_OPTION_ON_RECORD' , [
'Invalid option `on_record`:' ,
'expect a function,' ,
` got ${ JSON . stringify ( options . on _record ) } `
] )
}
// Normalize option `quote`
if ( options . quote === null || options . quote === false || options . quote === '' ) {
options . quote = null
} else {
if ( options . quote === undefined || options . quote === true ) {
options . quote = Buffer . from ( '"' )
} else if ( typeof options . quote === 'string' ) {
options . quote = Buffer . from ( options . quote )
}
if ( ! Buffer . isBuffer ( options . quote ) ) {
throw new Error ( ` Invalid Option: quote must be a buffer or a string, got ${ JSON . stringify ( options . quote ) } ` )
} else if ( options . quote . length !== 1 ) {
throw new Error ( ` Invalid Option Length: quote must be one character, got ${ options . quote . length } ` )
} else {
options . quote = options . quote [ 0 ]
}
}
// Normalize option `raw`
if ( options . raw === undefined || options . raw === null || options . raw === false ) {
options . raw = false
} else if ( options . raw !== true ) {
throw new Error ( ` Invalid Option: raw must be true, got ${ JSON . stringify ( options . raw ) } ` )
}
// Normalize option `record_delimiter`
if ( ! options . record _delimiter ) {
options . record _delimiter = [ ]
} else if ( ! Array . isArray ( options . record _delimiter ) ) {
options . record _delimiter = [ options . record _delimiter ]
}
options . record _delimiter = options . record _delimiter . map ( function ( rd ) {
if ( typeof rd === 'string' ) {
rd = Buffer . from ( rd )
}
return rd
} )
// Normalize option `relax`
if ( typeof options . relax === 'boolean' ) {
// Great, nothing to do
} else if ( options . relax === undefined || options . relax === null ) {
options . relax = false
} else {
throw new Error ( ` Invalid Option: relax must be a boolean, got ${ JSON . stringify ( options . relax ) } ` )
}
// Normalize option `relax_column_count`
if ( typeof options . relax _column _count === 'boolean' ) {
// Great, nothing to do
} else if ( options . relax _column _count === undefined || options . relax _column _count === null ) {
options . relax _column _count = false
} else {
throw new Error ( ` Invalid Option: relax_column_count must be a boolean, got ${ JSON . stringify ( options . relax _column _count ) } ` )
}
if ( typeof options . relax _column _count _less === 'boolean' ) {
// Great, nothing to do
} else if ( options . relax _column _count _less === undefined || options . relax _column _count _less === null ) {
options . relax _column _count _less = false
} else {
throw new Error ( ` Invalid Option: relax_column_count_less must be a boolean, got ${ JSON . stringify ( options . relax _column _count _less ) } ` )
}
if ( typeof options . relax _column _count _more === 'boolean' ) {
// Great, nothing to do
} else if ( options . relax _column _count _more === undefined || options . relax _column _count _more === null ) {
options . relax _column _count _more = false
} else {
throw new Error ( ` Invalid Option: relax_column_count_more must be a boolean, got ${ JSON . stringify ( options . relax _column _count _more ) } ` )
}
// Normalize option `skip_empty_lines`
if ( typeof options . skip _empty _lines === 'boolean' ) {
// Great, nothing to do
} else if ( options . skip _empty _lines === undefined || options . skip _empty _lines === null ) {
options . skip _empty _lines = false
} else {
throw new Error ( ` Invalid Option: skip_empty_lines must be a boolean, got ${ JSON . stringify ( options . skip _empty _lines ) } ` )
}
// Normalize option `skip_lines_with_empty_values`
if ( typeof options . skip _lines _with _empty _values === 'boolean' ) {
// Great, nothing to do
} else if ( options . skip _lines _with _empty _values === undefined || options . skip _lines _with _empty _values === null ) {
options . skip _lines _with _empty _values = false
} else {
throw new Error ( ` Invalid Option: skip_lines_with_empty_values must be a boolean, got ${ JSON . stringify ( options . skip _lines _with _empty _values ) } ` )
}
// Normalize option `skip_lines_with_error`
if ( typeof options . skip _lines _with _error === 'boolean' ) {
// Great, nothing to do
} else if ( options . skip _lines _with _error === undefined || options . skip _lines _with _error === null ) {
options . skip _lines _with _error = false
} else {
throw new Error ( ` Invalid Option: skip_lines_with_error must be a boolean, got ${ JSON . stringify ( options . skip _lines _with _error ) } ` )
}
// Normalize option `rtrim`
if ( options . rtrim === undefined || options . rtrim === null || options . rtrim === false ) {
options . rtrim = false
} else if ( options . rtrim !== true ) {
throw new Error ( ` Invalid Option: rtrim must be a boolean, got ${ JSON . stringify ( options . rtrim ) } ` )
}
// Normalize option `ltrim`
if ( options . ltrim === undefined || options . ltrim === null || options . ltrim === false ) {
options . ltrim = false
} else if ( options . ltrim !== true ) {
throw new Error ( ` Invalid Option: ltrim must be a boolean, got ${ JSON . stringify ( options . ltrim ) } ` )
}
// Normalize option `trim`
if ( options . trim === undefined || options . trim === null || options . trim === false ) {
options . trim = false
} else if ( options . trim !== true ) {
throw new Error ( ` Invalid Option: trim must be a boolean, got ${ JSON . stringify ( options . trim ) } ` )
}
// Normalize options `trim`, `ltrim` and `rtrim`
if ( options . trim === true && opts . ltrim !== false ) {
options . ltrim = true
} else if ( options . ltrim !== true ) {
options . ltrim = false
}
if ( options . trim === true && opts . rtrim !== false ) {
options . rtrim = true
} else if ( options . rtrim !== true ) {
options . rtrim = false
}
// Normalize option `to`
if ( options . to === undefined || options . to === null ) {
options . to = - 1
} else {
if ( typeof options . to === 'string' && /\d+/ . test ( options . to ) ) {
options . to = parseInt ( options . to )
}
if ( Number . isInteger ( options . to ) ) {
if ( options . to <= 0 ) {
throw new Error ( ` Invalid Option: to must be a positive integer greater than 0, got ${ JSON . stringify ( opts . to ) } ` )
}
} else {
throw new Error ( ` Invalid Option: to must be an integer, got ${ JSON . stringify ( opts . to ) } ` )
}
}
// Normalize option `to_line`
if ( options . to _line === undefined || options . to _line === null ) {
options . to _line = - 1
} else {
if ( typeof options . to _line === 'string' && /\d+/ . test ( options . to _line ) ) {
options . to _line = parseInt ( options . to _line )
}
if ( Number . isInteger ( options . to _line ) ) {
if ( options . to _line <= 0 ) {
throw new Error ( ` Invalid Option: to_line must be a positive integer greater than 0, got ${ JSON . stringify ( opts . to _line ) } ` )
}
} else {
throw new Error ( ` Invalid Option: to_line must be an integer, got ${ JSON . stringify ( opts . to _line ) } ` )
}
}
this . info = {
comment _lines : 0 ,
empty _lines : 0 ,
invalid _field _length : 0 ,
lines : 1 ,
records : 0
}
this . options = options
this . state = {
bomSkipped : false ,
castField : fnCastField ,
commenting : false ,
enabled : options . from _line === 1 ,
escaping : false ,
escapeIsQuote : options . escape === options . quote ,
expectedRecordLength : options . columns === null ? 0 : options . columns . length ,
field : new ResizeableBuffer ( 20 ) ,
firstLineToHeaders : fnFirstLineToHeaders ,
info : Object . assign ( { } , this . info ) ,
previousBuf : undefined ,
quoting : false ,
stop : false ,
rawBuffer : new ResizeableBuffer ( 100 ) ,
record : [ ] ,
recordHasError : false ,
record _length : 0 ,
recordDelimiterMaxLength : options . record _delimiter . length === 0 ? 2 : Math . max ( ... options . record _delimiter . map ( ( v ) => v . length ) ) ,
trimChars : [ Buffer . from ( ' ' ) [ 0 ] , Buffer . from ( '\t' ) [ 0 ] ] ,
wasQuoting : false ,
wasRowDelimiter : false
}
}
// Implementation of `Transform._transform`
_transform ( buf , encoding , callback ) {
if ( this . state . stop === true ) {
return
}
const err = this . _ _parse ( buf , false )
if ( err !== undefined ) {
this . state . stop = true
}
callback ( err )
}
// Implementation of `Transform._flush`
_flush ( callback ) {
if ( this . state . stop === true ) {
return
}
const err = this . _ _parse ( undefined , true )
callback ( err )
}
// Central parser implementation.
// Scans `nextBuf` (prepended with any bytes carried over from the previous
// call) one byte at a time, driving the quoting/escaping/commenting state
// machine and emitting fields and records via __onField/__onRow.
// `end` is true only on the final (_flush) invocation.
// Returns an Error to abort parsing, undefined otherwise.
__parse (nextBuf, end) {
  const { bom, comment, escape, from_line, info, ltrim, max_record_size, quote, raw, relax, rtrim, skip_empty_lines, to, to_line } = this.options
  let { record_delimiter } = this.options
  const { bomSkipped, previousBuf, rawBuffer, escapeIsQuote } = this.state
  let buf
  if (previousBuf === undefined) {
    if (nextBuf === undefined) {
      // Handle empty string
      this.push(null)
      return
    } else {
      buf = nextBuf
    }
  } else if (previousBuf !== undefined && nextBuf === undefined) {
    // Flush call: only the carried-over bytes remain
    buf = previousBuf
  } else {
    buf = Buffer.concat([previousBuf, nextBuf])
  }
  // Handle UTF BOM
  if (bomSkipped === false) {
    if (bom === false) {
      this.state.bomSkipped = true
    } else if (buf.length < 3) {
      // No enough data
      if (end === false) {
        // Wait for more data
        this.state.previousBuf = buf
        return
      }
      // skip BOM detect because data length < 3
    } else {
      if (bom_utf8.compare(buf, 0, 3) === 0) {
        // Skip BOM
        buf = buf.slice(3)
      }
      this.state.bomSkipped = true
    }
  }
  const bufLen = buf.length
  let pos
  for (pos = 0; pos < bufLen; pos++) {
    // Ensure we get enough space to look ahead
    // There should be a way to move this out of the loop
    if (this.__needMoreData(pos, bufLen, end)) {
      break
    }
    if (this.state.wasRowDelimiter === true) {
      this.info.lines++
      // Snapshot per-record info only at a clean record boundary
      if (info === true && this.state.record.length === 0 && this.state.field.length === 0 && this.state.wasQuoting === false) {
        this.state.info = Object.assign({}, this.info)
      }
      this.state.wasRowDelimiter = false
    }
    if (to_line !== -1 && this.info.lines > to_line) {
      // Past the requested last line: stop the stream
      this.state.stop = true
      this.push(null)
      return
    }
    // Auto discovery of record_delimiter, unix, mac and windows supported
    if (this.state.quoting === false && record_delimiter.length === 0) {
      const record_delimiterCount = this.__autoDiscoverRowDelimiter(buf, pos)
      if (record_delimiterCount) {
        record_delimiter = this.options.record_delimiter
      }
    }
    const chr = buf[pos]
    if (raw === true) {
      rawBuffer.append(chr)
    }
    if ((chr === cr || chr === nl) && this.state.wasRowDelimiter === false) {
      this.state.wasRowDelimiter = true
    }
    // Previous char was a valid escape char
    // treat the current char as a regular char
    if (this.state.escaping === true) {
      this.state.escaping = false
    } else {
      // Escape is only active inside quoted fields
      // We are quoting, the char is an escape chr and there is a chr to escape
      if (escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen) {
        if (escapeIsQuote) {
          // When escape === quote, only escape an actual following quote
          if (buf[pos + 1] === quote) {
            this.state.escaping = true
            continue
          }
        } else {
          this.state.escaping = true
          continue
        }
      }
      // Not currently escaping and chr is a quote
      // TODO: need to compare bytes instead of single char
      if (this.state.commenting === false && chr === quote) {
        if (this.state.quoting === true) {
          // Inside a quoted field: decide if this quote closes it
          const nextChr = buf[pos + 1]
          const isNextChrTrimable = rtrim && this.__isCharTrimable(nextChr)
          const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos + 1, nextChr)
          const isNextChrDelimiter = this.__isDelimiter(nextChr, buf, pos + 1)
          const isNextChrRowDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRowDelimiter(buf, pos + 1) : this.__isRecordDelimiter(nextChr, buf, pos + 1)
          // Escape a quote
          // Treat next char as a regular character
          // TODO: need to compare bytes instead of single char
          if (escape !== null && chr === escape && nextChr === quote) {
            pos++
          } else if (!nextChr || isNextChrDelimiter || isNextChrRowDelimiter || isNextChrComment || isNextChrTrimable) {
            // Valid closing quote: followed by end of data, a delimiter,
            // a row delimiter, a comment marker or a trimable char
            this.state.quoting = false
            this.state.wasQuoting = true
            continue
          } else if (relax === false) {
            const err = this.__error(
              new CsvError('CSV_INVALID_CLOSING_QUOTE', [
                'Invalid Closing Quote:',
                `got "${String.fromCharCode(nextChr)}"`,
                `at line ${this.info.lines}`,
                'instead of delimiter, row delimiter, trimable character',
                '(if activated) or comment',
              ], this.__context())
            )
            if (err !== undefined) return err
          } else {
            // Relax mode: re-open the field with a literal leading quote
            this.state.quoting = false
            this.state.wasQuoting = true
            this.state.field.prepend(quote)
          }
        } else {
          if (this.state.field.length !== 0) {
            // In relax mode, treat opening quote preceded by chrs as regular
            if (relax === false) {
              const err = this.__error(
                new CsvError('INVALID_OPENING_QUOTE', [
                  'Invalid Opening Quote:',
                  `a quote is found inside a field at line ${this.info.lines}`,
                ], this.__context(), {
                  field: this.state.field,
                })
              )
              if (err !== undefined) return err
            }
          } else {
            // Quote at field start: enter quoted mode
            this.state.quoting = true
            continue
          }
        }
      }
      if (this.state.quoting === false) {
        let recordDelimiterLength = this.__isRecordDelimiter(chr, buf, pos)
        if (recordDelimiterLength !== 0) {
          // Do not emit comments which take a full line
          const skipCommentLine = this.state.commenting && (this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0)
          if (skipCommentLine) {
            this.info.comment_lines++
            // Skip full comment line
          } else {
            // Skip if line is empty and skip_empty_lines activated
            if (skip_empty_lines === true && this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0) {
              this.info.empty_lines++
              pos += recordDelimiterLength - 1
              continue
            }
            // Activate records emition if above from_line
            if (this.state.enabled === false && this.info.lines + (this.state.wasRowDelimiter === true ? 1 : 0) >= from_line) {
              this.state.enabled = true
              this.__resetField()
              this.__resetRow()
              pos += recordDelimiterLength - 1
              continue
            } else {
              // Finalize the current field and emit the record
              const errField = this.__onField()
              if (errField !== undefined) return errField
              const errRecord = this.__onRow()
              if (errRecord !== undefined) return errRecord
            }
            if (to !== -1 && this.info.records >= to) {
              // Reached the requested record count: stop the stream
              this.state.stop = true
              this.push(null)
              return
            }
          }
          this.state.commenting = false
          pos += recordDelimiterLength - 1
          continue
        }
        if (this.state.commenting) {
          // Inside a comment: swallow bytes until the row delimiter
          continue
        }
        const commentCount = comment === null ? 0 : this.__compareBytes(comment, buf, pos, chr)
        if (commentCount !== 0) {
          this.state.commenting = true
          continue
        }
        let delimiterLength = this.__isDelimiter(chr, buf, pos)
        if (delimiterLength !== 0) {
          const errField = this.__onField()
          if (errField !== undefined) return errField
          pos += delimiterLength - 1
          continue
        }
      }
    }
    if (this.state.commenting === false) {
      if (max_record_size !== 0 && this.state.record_length + this.state.field.length > max_record_size) {
        const err = this.__error(
          new CsvError('CSV_MAX_RECORD_SIZE', [
            'Max Record Size:',
            'record exceed the maximum number of tolerated bytes',
            `of ${max_record_size}`,
            `at line ${this.info.lines}`,
          ], this.__context())
        )
        if (err !== undefined) return err
      }
    }
    // Decide whether the byte is appended to the current field
    const lappend = ltrim === false || this.state.quoting === true || this.state.field.length !== 0 || !this.__isCharTrimable(chr)
    // rtrim in non quoting is handle in __onField
    const rappend = rtrim === false || this.state.wasQuoting === false
    if (lappend === true && rappend === true) {
      this.state.field.append(chr)
    } else if (rtrim === true && !this.__isCharTrimable(chr)) {
      const err = this.__error(
        new CsvError('CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE', [
          'Invalid Closing Quote:',
          'found non trimable byte after quote',
          `at line ${this.info.lines}`,
        ], this.__context())
      )
      if (err !== undefined) return err
    }
  }
  if (end === true) {
    // Ensure we are not ending in a quoting state
    if (this.state.quoting === true) {
      const err = this.__error(
        new CsvError('CSV_QUOTE_NOT_CLOSED', [
          'Quote Not Closed:',
          `the parsing is finished with an opening quote at line ${this.info.lines}`,
        ], this.__context())
      )
      if (err !== undefined) return err
    } else {
      // Skip last line if it has no characters
      if (this.state.wasQuoting === true || this.state.record.length !== 0 || this.state.field.length !== 0) {
        const errField = this.__onField()
        if (errField !== undefined) return errField
        const errRecord = this.__onRow()
        if (errRecord !== undefined) return errRecord
      } else if (this.state.wasRowDelimiter === true) {
        this.info.empty_lines++
      } else if (this.state.commenting === true) {
        this.info.comment_lines++
      }
    }
  } else {
    // Keep unconsumed bytes for the next _transform call
    this.state.previousBuf = buf.slice(pos)
  }
  if (this.state.wasRowDelimiter === true) {
    this.info.lines++
    this.state.wasRowDelimiter = false
  }
}
// Helper to test if a character is a space or a line delimiter
_ _isCharTrimable ( chr ) {
return chr === space || chr === tab || chr === cr || chr === nl || chr === np
}
_ _onRow ( ) {
const { columns , columns _duplicates _to _array , info , from , relax _column _count , relax _column _count _less , relax _column _count _more , raw , skip _lines _with _empty _values } = this . options
const { enabled , record } = this . state
if ( enabled === false ) {
return this . _ _resetRow ( )
}
// Convert the first line into column names
const recordLength = record . length
if ( columns === true ) {
if ( isRecordEmpty ( record ) ) {
this . _ _resetRow ( )
return
}
return this . _ _firstLineToColumns ( record )
}
if ( columns === false && this . info . records === 0 ) {
this . state . expectedRecordLength = recordLength
}
if ( recordLength !== this . state . expectedRecordLength ) {
if ( relax _column _count === true ||
( relax _column _count _less === true && recordLength < this . state . expectedRecordLength ) ||
( relax _column _count _more === true && recordLength > this . state . expectedRecordLength ) ) {
this . info . invalid _field _length ++
} else {
if ( columns === false ) {
const err = this . _ _error (
new CsvError ( 'CSV_INCONSISTENT_RECORD_LENGTH' , [
'Invalid Record Length:' ,
` expect ${ this . state . expectedRecordLength } , ` ,
` got ${ recordLength } on line ${ this . info . lines } ` ,
] , this . _ _context ( ) , {
record : record ,
} )
)
if ( err !== undefined ) return err
} else {
const err = this . _ _error (
// CSV_INVALID_RECORD_LENGTH_DONT_MATCH_COLUMNS
new CsvError ( 'CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH' , [
'Invalid Record Length:' ,
` columns length is ${ columns . length } , ` , // rename columns
` got ${ recordLength } on line ${ this . info . lines } ` ,
] , this . _ _context ( ) , {
record : record ,
} )
)
if ( err !== undefined ) return err
}
}
}
if ( skip _lines _with _empty _values === true ) {
if ( isRecordEmpty ( record ) ) {
this . _ _resetRow ( )
return
}
}
if ( this . state . recordHasError === true ) {
this . _ _resetRow ( )
this . state . recordHasError = false
return
}
this . info . records ++
if ( from === 1 || this . info . records >= from ) {
if ( columns !== false ) {
const obj = { }
// Transform record array to an object
for ( let i = 0 , l = record . length ; i < l ; i ++ ) {
if ( columns [ i ] === undefined || columns [ i ] . disabled ) continue
// obj[columns[i].name] = record[i]
// Turn duplicate columns into an array
if ( columns _duplicates _to _array === true && obj [ columns [ i ] . name ] ) {
if ( Array . isArray ( obj [ columns [ i ] . name ] ) ) {
obj [ columns [ i ] . name ] = obj [ columns [ i ] . name ] . concat ( record [ i ] )
} else {
obj [ columns [ i ] . name ] = [ obj [ columns [ i ] . name ] , record [ i ] ]
}
} else {
obj [ columns [ i ] . name ] = record [ i ]
}
}
const { objname } = this . options
if ( objname === undefined ) {
if ( raw === true || info === true ) {
const err = this . _ _push ( Object . assign (
{ record : obj } ,
( raw === true ? { raw : this . state . rawBuffer . toString ( ) } : { } ) ,
( info === true ? { info : this . state . info } : { } )
) )
if ( err ) {
return err
}
} else {
const err = this . _ _push ( obj )
if ( err ) {
return err
}
}
} else {
if ( raw === true || info === true ) {
const err = this . _ _push ( Object . assign (
{ record : [ obj [ objname ] , obj ] } ,
raw === true ? { raw : this . state . rawBuffer . toString ( ) } : { } ,
info === true ? { info : this . state . info } : { }
) )
if ( err ) {
return err
}
} else {
const err = this . _ _push ( [ obj [ objname ] , obj ] )
if ( err ) {
return err
}
}
}
} else {
if ( raw === true || info === true ) {
const err = this . _ _push ( Object . assign (
{ record : record } ,
raw === true ? { raw : this . state . rawBuffer . toString ( ) } : { } ,
info === true ? { info : this . state . info } : { }
) )
if ( err ) {
return err
}
} else {
const err = this . _ _push ( record )
if ( err ) {
return err
}
}
}
}
this . _ _resetRow ( )
}
_ _firstLineToColumns ( record ) {
const { firstLineToHeaders } = this . state
try {
const headers = firstLineToHeaders === undefined ? record : firstLineToHeaders . call ( null , record )
if ( ! Array . isArray ( headers ) ) {
return this . _ _error (
new CsvError ( 'CSV_INVALID_COLUMN_MAPPING' , [
'Invalid Column Mapping:' ,
'expect an array from column function,' ,
` got ${ JSON . stringify ( headers ) } `
] , this . _ _context ( ) , {
headers : headers ,
} )
)
}
const normalizedHeaders = normalizeColumnsArray ( headers )
this . state . expectedRecordLength = normalizedHeaders . length
this . options . columns = normalizedHeaders
this . _ _resetRow ( )
return
} catch ( err ) {
return err
}
}
_ _resetRow ( ) {
if ( this . options . raw === true ) {
this . state . rawBuffer . reset ( )
}
this . state . record = [ ]
this . state . record _length = 0
}
_ _onField ( ) {
const { cast , rtrim , max _record _size } = this . options
const { enabled , wasQuoting } = this . state
// Short circuit for the from_line options
if ( enabled === false ) { /* this.options.columns !== true && */
return this . _ _resetField ( )
}
let field = this . state . field . toString ( )
if ( rtrim === true && wasQuoting === false ) {
field = field . trimRight ( )
}
if ( cast === true ) {
const [ err , f ] = this . _ _cast ( field )
if ( err !== undefined ) return err
field = f
}
this . state . record . push ( field )
// Increment record length if record size must not exceed a limit
if ( max _record _size !== 0 && typeof field === 'string' ) {
this . state . record _length += field . length
}
this . _ _resetField ( )
}
_ _resetField ( ) {
this . state . field . reset ( )
this . state . wasQuoting = false
}
_ _push ( record ) {
const { on _record } = this . options
if ( on _record !== undefined ) {
const context = this . _ _context ( )
try {
record = on _record . call ( null , record , context )
} catch ( err ) {
return err
}
if ( record === undefined || record === null ) { return }
}
this . push ( record )
}
// Return a tuple with the error and the casted value.
// Precedence: the user `castField` function if set, otherwise float parsing,
// otherwise the `cast_date` function, otherwise the field unchanged.
__cast (field) {
  const { columns, relax_column_count } = this.options
  const isColumns = Array.isArray(columns)
  // Dont loose time calling cast
  // because the final record is an object
  // and this field can't be associated to a key present in columns
  if (isColumns === true && relax_column_count && this.options.columns.length <= this.state.record.length) {
    return [undefined, undefined]
  }
  const context = this.__context()
  if (this.state.castField !== null) {
    try {
      return [undefined, this.state.castField.call(null, field, context)]
    } catch (err) {
      // Cast errors are returned as the first tuple element
      return [err]
    }
  }
  if (this.__isFloat(field)) {
    return [undefined, parseFloat(field)]
  } else if (this.options.cast_date !== false) {
    return [undefined, this.options.cast_date.call(null, field, context)]
  }
  return [undefined, field]
}
// Keep it in case we implement the `cast_int` option
// __isInt(value){
// // return Number.isInteger(parseInt(value))
// // return !isNaN( parseInt( obj ) );
// return /^(\-|\+)?[1-9][0-9]*$/.test(value)
// }
_ _isFloat ( value ) {
return ( value - parseFloat ( value ) + 1 ) >= 0 // Borrowed from jquery
}
_ _compareBytes ( sourceBuf , targetBuf , pos , firtByte ) {
if ( sourceBuf [ 0 ] !== firtByte ) return 0
const sourceLength = sourceBuf . length
for ( let i = 1 ; i < sourceLength ; i ++ ) {
if ( sourceBuf [ i ] !== targetBuf [ pos + i ] ) return 0
}
return sourceLength
}
_ _needMoreData ( i , bufLen , end ) {
if ( end ) {
return false
}
const { comment , delimiter } = this . options
const { quoting , recordDelimiterMaxLength } = this . state
const numOfCharLeft = bufLen - i - 1
const requiredLength = Math . max (
// Skip if the remaining buffer smaller than comment
comment ? comment . length : 0 ,
// Skip if the remaining buffer smaller than row delimiter
recordDelimiterMaxLength ,
// Skip if the remaining buffer can be row delimiter following the closing quote
// 1 is for quote.length
quoting ? ( 1 + recordDelimiterMaxLength ) : 0 ,
// Skip if the remaining buffer can be delimiter
delimiter . length ,
// Skip if the remaining buffer can be escape sequence
// 1 is for escape.length
1
)
return numOfCharLeft < requiredLength
}
_ _isDelimiter ( chr , buf , pos ) {
const { delimiter } = this . options
loop1 : for ( let i = 0 ; i < delimiter . length ; i ++ ) {
const del = delimiter [ i ]
if ( del [ 0 ] === chr ) {
for ( let j = 1 ; j < del . length ; j ++ ) {
if ( del [ j ] !== buf [ pos + j ] ) continue loop1
}
return del . length
}
}
return 0
}
_ _isRecordDelimiter ( chr , buf , pos ) {
const { record _delimiter } = this . options
const recordDelimiterLength = record _delimiter . length
loop1 : for ( let i = 0 ; i < recordDelimiterLength ; i ++ ) {
const rd = record _delimiter [ i ]
const rdLength = rd . length
if ( rd [ 0 ] !== chr ) {
continue
}
for ( let j = 1 ; j < rdLength ; j ++ ) {
if ( rd [ j ] !== buf [ pos + j ] ) {
continue loop1
}
}
return rd . length
}
return 0
}
_ _autoDiscoverRowDelimiter ( buf , pos ) {
const chr = buf [ pos ]
if ( chr === cr ) {
if ( buf [ pos + 1 ] === nl ) {
this . options . record _delimiter . push ( Buffer . from ( '\r\n' ) )
this . state . recordDelimiterMaxLength = 2
return 2
} else {
this . options . record _delimiter . push ( Buffer . from ( '\r' ) )
this . state . recordDelimiterMaxLength = 1
return 1
}
} else if ( chr === nl ) {
this . options . record _delimiter . push ( Buffer . from ( '\n' ) )
this . state . recordDelimiterMaxLength = 1
return 1
}
return 0
}
_ _error ( msg ) {
const { skip _lines _with _error } = this . options
const err = typeof msg === 'string' ? new Error ( msg ) : msg
if ( skip _lines _with _error ) {
this . state . recordHasError = true
this . emit ( 'skip' , err )
return undefined
} else {
return err
}
}
_ _context ( ) {
const { columns } = this . options
const isColumns = Array . isArray ( columns )
return {
column : isColumns === true ?
( columns . length > this . state . record . length ?
columns [ this . state . record . length ] . name :
null
) :
this . state . record . length ,
empty _lines : this . info . empty _lines ,
header : columns === true ,
index : this . state . record . length ,
invalid _field _length : this . info . invalid _field _length ,
quoting : this . state . wasQuoting ,
lines : this . info . lines ,
records : this . info . records
}
}
}
const parse = function ( ) {
let data , options , callback
for ( let i in arguments ) {
const argument = arguments [ i ]
const type = typeof argument
if ( data === undefined && ( typeof argument === 'string' || Buffer . isBuffer ( argument ) ) ) {
data = argument
} else if ( options === undefined && isObject ( argument ) ) {
options = argument
} else if ( callback === undefined && type === 'function' ) {
callback = argument
} else {
throw new CsvError ( 'CSV_INVALID_ARGUMENT' , [
'Invalid argument:' ,
` got ${ JSON . stringify ( argument ) } at index ${ i } `
] )
}
}
const parser = new Parser ( options )
if ( callback ) {
const records = options === undefined || options . objname === undefined ? [ ] : { }
parser . on ( 'readable' , function ( ) {
let record
while ( ( record = this . read ( ) ) !== null ) {
if ( options === undefined || options . objname === undefined ) {
records . push ( record )
} else {
records [ record [ 0 ] ] = record [ 1 ]
}
}
} )
parser . on ( 'error' , function ( err ) {
callback ( err , undefined , parser . info )
} )
parser . on ( 'end' , function ( ) {
callback ( undefined , records , parser . info )
} )
}
if ( data !== undefined ) {
// Give a chance for events to be registered later
if ( typeof setImmediate === 'function' ) {
setImmediate ( function ( ) {
parser . write ( data )
parser . end ( )
} )
} else {
parser . write ( data )
parser . end ( )
}
}
return parser
}
// Error subclass carrying a machine-readable `code` plus arbitrary context
// properties. Buffer values are stringified and other values deep-copied
// through JSON so the error is safe to serialize and log.
class CsvError extends Error {
  constructor (code, message, ...contexts) {
    super(Array.isArray(message) ? message.join(' ') : message)
    if (Error.captureStackTrace !== undefined) {
      Error.captureStackTrace(this, CsvError)
    }
    this.code = code
    for (const context of contexts) {
      for (const key in context) {
        const value = context[key]
        if (Buffer.isBuffer(value)) {
          this[key] = value.toString()
        } else if (value == null) {
          this[key] = value
        } else {
          this[key] = JSON.parse(JSON.stringify(value))
        }
      }
    }
  }
}
parse . Parser = Parser
parse . CsvError = CsvError
module . exports = parse
// Convert a camelCase string to snake_case: each capital becomes "_" + lowercase
const underscore = function (str) {
  return str.replace(/([A-Z])/g, (_, letter) => `_${letter.toLowerCase()}`)
}
// True only for plain non-null objects; arrays and primitives are excluded
const isObject = function (obj) {
  if (obj === null) {
    return false
  }
  return typeof obj === 'object' && !Array.isArray(obj)
}
// A record counts as empty when every field is null/undefined or
// stringifies to whitespace only
const isRecordEmpty = function (record) {
  return record.every((field) => {
    if (field == null) {
      return true
    }
    return field.toString && field.toString().trim() === ''
  })
}
// Normalize user-supplied column definitions into objects of the shape
// `{name}` (string entry) or `{disabled: true}` (null/undefined/false entry);
// object entries must already carry a string `name`. Throws CsvError on
// invalid entries.
const normalizeColumnsArray = function (columns) {
  const normalizedColumns = []
  columns.forEach((column, i) => {
    if (column === undefined || column === null || column === false) {
      normalizedColumns[i] = { disabled: true }
    } else if (typeof column === 'string') {
      normalizedColumns[i] = { name: column }
    } else if (isObject(column)) {
      if (typeof column.name !== 'string') {
        throw new CsvError('CSV_OPTION_COLUMNS_MISSING_NAME', [
          'Option columns missing name:',
          `property "name" is required at position ${i}`,
          'when column is an object literal'
        ])
      }
      normalizedColumns[i] = column
    } else {
      throw new CsvError('CSV_INVALID_COLUMN_DEFINITION', [
        'Invalid column definition:',
        'expect a string or a literal object,',
        `got ${JSON.stringify(column)} at position ${i}`
      ])
    }
  })
  return normalizedColumns
}
2020-08-16 19:13:19 +02:00
/***/ } ) ,
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
/***/ 832 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
// Return the lowest version in `versions` that satisfies `range`,
// or null when the range is invalid or nothing matches.
const minSatisfying = (versions, range, options) => {
  let rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    // Invalid range: nothing can satisfy it
    return null
  }
  let min = null
  let minSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) {
      // satisfies(v, range, options)
      continue
    }
    if (!min || minSV.compare(v) === 1) {
      // compare(min, v, true)
      min = v
      minSV = new SemVer(min, options)
    }
  }
  return min
}
2020-09-02 10:07:11 +02:00
module . exports = minSatisfying
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 842 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript compiler runtime helpers (downlevel emit for `import * as` and
// `async/await`); kept verbatim from the tsc output.
// __createBinding: re-export property `k` of module `m` on `o` (renamed `k2`),
// using a live getter when Object.create is available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attach a CommonJS module object as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulate `import * as ns from "mod"` for a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: drive a generator function as an async function, resolving each
// yielded value and settling the returned Promise with the final result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Mark the compiled module as an ES module for interop with __importStar.
Object.defineProperty(exports, "__esModule", { value: true });
2020-10-19 21:17:06 +02:00
exports . asyncForEach = exports . getInputList = exports . getArgs = exports . getInputs = exports . tmpNameSync = exports . tmpDir = exports . defaultContext = void 0 ;
2020-09-02 10:07:11 +02:00
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
2020-08-23 03:31:38 +02:00
const semver = _ _importStar ( _ _webpack _require _ _ ( 383 ) ) ;
2020-10-19 21:17:06 +02:00
const tmp = _ _importStar ( _ _webpack _require _ _ ( 517 ) ) ;
2020-09-02 10:07:11 +02:00
const buildx = _ _importStar ( _ _webpack _require _ _ ( 295 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 186 ) ) ;
const github = _ _importStar ( _ _webpack _require _ _ ( 438 ) ) ;
2020-10-21 02:46:41 +02:00
let _defaultContext , _tmpDir ;
2020-10-19 21:17:06 +02:00
// Lazily compute (and cache) the default git build context for the current
// GitHub repository, e.g. "https://github.com/owner/repo.git#master"
// (the ref with its leading "refs/" stripped).
function defaultContext() {
    if (!_defaultContext) {
        const { owner, repo } = github.context.repo;
        const ctx = github.context;
        const ref = ctx === null || ctx === void 0 ? void 0 : ctx.ref;
        const branch = ref === null || ref === void 0 ? void 0 : ref.replace(/^refs\//, '');
        _defaultContext = `https://github.com/${owner}/${repo}.git#${branch}`;
    }
    return _defaultContext;
}
exports.defaultContext = defaultContext;
// Lazily create (once) a dedicated temporary directory for this action,
// normalizing the path to POSIX separators.
function tmpDir() {
    if (!_tmpDir) {
        const created = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-'));
        _tmpDir = created.split(path.sep).join(path.posix.sep);
    }
    return _tmpDir;
}
exports.tmpDir = tmpDir;
function tmpNameSync ( options ) {
return tmp . tmpNameSync ( options ) ;
}
exports . tmpNameSync = tmpNameSync ;
// Collect and normalize all action inputs into a single object.
// `defaultContext` is used when no explicit `context` input is provided;
// list-valued inputs are resolved through getInputList.
function getInputs(defaultContext) {
    return __awaiter(this, void 0, void 0, function* () {
        // Interpret an input as a boolean flag ("true", case-insensitive)
        const flag = (name) => /true/i.test(core.getInput(name));
        return {
            context: core.getInput('context') || defaultContext,
            file: core.getInput('file') || 'Dockerfile',
            buildArgs: yield getInputList('build-args', true),
            labels: yield getInputList('labels', true),
            tags: yield getInputList('tags'),
            pull: flag('pull'),
            target: core.getInput('target'),
            allow: yield getInputList('allow'),
            noCache: flag('no-cache'),
            builder: core.getInput('builder'),
            platforms: yield getInputList('platforms'),
            load: flag('load'),
            push: flag('push'),
            outputs: yield getInputList('outputs', true),
            cacheFrom: yield getInputList('cache-from', true),
            cacheTo: yield getInputList('cache-to', true),
            secrets: yield getInputList('secrets', true),
            githubToken: core.getInput('github-token'),
            ssh: yield getInputList('ssh')
        };
    });
}
2020-09-02 10:07:11 +02:00
exports . getInputs = getInputs ;
2020-10-19 21:17:06 +02:00
/**
 * Builds the full `docker buildx ...` argument vector:
 * `buildx` + build flags + common flags + the build context (last).
 * @param inputs resolved action inputs
 * @param defaultContext default Git context URL
 * @param buildxVersion installed buildx version (semver string)
 * @returns {Promise<string[]>} complete argument list
 */
function getArgs(inputs, defaultContext, buildxVersion) {
    return __awaiter(this, void 0, void 0, function* () {
        let args = ['buildx'];
        args.push(...(yield getBuildArgs(inputs, defaultContext, buildxVersion)));
        args.push(...(yield getCommonArgs(inputs)));
        args.push(inputs.context);
        return args;
    });
}
2020-09-02 10:07:11 +02:00
exports . getArgs = getArgs ;
2020-10-19 21:17:06 +02:00
function getBuildArgs ( inputs , defaultContext , buildxVersion ) {
2020-08-23 03:31:38 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-09-02 10:07:11 +02:00
let args = [ 'build' ] ;
yield exports . asyncForEach ( inputs . buildArgs , ( buildArg ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--build-arg' , buildArg ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . labels , ( label ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--label' , label ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . tags , ( tag ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--tag' , tag ) ;
} ) ) ;
if ( inputs . target ) {
args . push ( '--target' , inputs . target ) ;
}
if ( inputs . allow . length > 0 ) {
args . push ( '--allow' , inputs . allow . join ( ',' ) ) ;
}
if ( inputs . platforms . length > 0 ) {
args . push ( '--platform' , inputs . platforms . join ( ',' ) ) ;
}
yield exports . asyncForEach ( inputs . outputs , ( output ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--output' , output ) ;
} ) ) ;
2020-10-21 09:51:06 +02:00
if ( ! buildx . isLocalOrTarExporter ( inputs . outputs ) &&
( inputs . platforms . length == 0 || semver . satisfies ( buildxVersion , '>=0.4.2' ) ) ) {
2020-10-19 21:17:06 +02:00
args . push ( '--iidfile' , yield buildx . getImageIDFile ( ) ) ;
}
2020-09-02 10:07:11 +02:00
yield exports . asyncForEach ( inputs . cacheFrom , ( cacheFrom ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--cache-from' , cacheFrom ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . cacheTo , ( cacheTo ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--cache-to' , cacheTo ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . secrets , ( secret ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--secret' , yield buildx . getSecret ( secret ) ) ;
} ) ) ;
2020-10-19 22:12:33 +02:00
if ( inputs . githubToken && ! buildx . hasGitAuthToken ( inputs . secrets ) && inputs . context == defaultContext ) {
2020-09-22 20:49:18 +02:00
args . push ( '--secret' , yield buildx . getSecret ( ` GIT_AUTH_TOKEN= ${ inputs . githubToken } ` ) ) ;
}
2020-09-11 01:23:49 +02:00
yield exports . asyncForEach ( inputs . ssh , ( ssh ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--ssh' , ssh ) ;
} ) ) ;
2020-09-02 10:07:11 +02:00
if ( inputs . file ) {
args . push ( '--file' , inputs . file ) ;
}
return args ;
2020-08-23 03:31:38 +02:00
} ) ;
}
2020-09-02 10:07:11 +02:00
/**
 * Flags shared by buildx subcommands: cache behavior, builder instance,
 * and the pull/load/push toggles.
 * @param inputs resolved action inputs
 * @returns {Promise<string[]>} flag list (possibly empty)
 */
function getCommonArgs(inputs) {
    return __awaiter(this, void 0, void 0, function* () {
        let args = [];
        if (inputs.noCache) {
            args.push('--no-cache');
        }
        if (inputs.builder) {
            args.push('--builder', inputs.builder);
        }
        if (inputs.pull) {
            args.push('--pull');
        }
        if (inputs.load) {
            args.push('--load');
        }
        if (inputs.push) {
            args.push('--push');
        }
        return args;
    });
}
2020-09-02 10:07:11 +02:00
/**
 * Parses a multi-line action input into a trimmed string list.
 * Lines are split on newlines; unless `ignoreComma` is set, each line is
 * additionally split on commas. Empty items are dropped.
 * @param name input name passed to core.getInput
 * @param ignoreComma when true, keep commas inside a line intact
 * @returns {Promise<string[]>} parsed list ([] when the input is empty)
 */
function getInputList(name, ignoreComma) {
    return __awaiter(this, void 0, void 0, function* () {
        const items = core.getInput(name);
        if (items == '') {
            return [];
        }
        return items
            .split(/\r?\n/)
            .filter(x => x)
            .reduce((acc, line) => acc.concat(!ignoreComma ? line.split(',').filter(x => x) : line).map(pat => pat.trim()), []);
    });
}
2020-09-02 10:07:11 +02:00
exports . getInputList = getInputList ;
// Sequentially awaits callback(item, index, array) for each element —
// unlike Promise.all, iterations never overlap, preserving push order
// for callers that append to shared arrays (see getBuildArgs).
exports.asyncForEach = (array, callback) => __awaiter(void 0, void 0, void 0, function* () {
    for (let index = 0; index < array.length; index++) {
        yield callback(array[index], index, array);
    }
});
//# sourceMappingURL=context.js.map
/***/ } ) ,
/***/ 848 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = _ _webpack _require _ _ ( 925 )
// Normalizes a version string: strips surrounding whitespace and any
// leading "=" / "v" markers, then parses it. Returns the canonical
// version string, or null when the input is not a valid semver.
const clean = (version, options) => {
  const normalized = version.trim().replace(/^[=v]+/, '')
  const parsed = parse(normalized, options)
  return parsed ? parsed.version : null
}
module . exports = clean
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 863 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-10-19 21:17:06 +02:00
const Range = _ _webpack _require _ _ ( 828 )
const { ANY } = _ _webpack _require _ _ ( 532 )
const satisfies = _ _webpack _require _ _ ( 55 )
const compare = _ _webpack _require _ _ ( 309 )
2020-09-02 10:07:11 +02:00
2020-10-19 21:17:06 +02:00
// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
//
// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
// - If c is only the ANY comparator
// - If C is only the ANY comparator, return true
// - Else return false
// - Let EQ be the set of = comparators in c
// - If EQ is more than one, return true (null set)
// - Let GT be the highest > or >= comparator in c
// - Let LT be the lowest < or <= comparator in c
// - If GT and LT, and GT.semver > LT.semver, return true (null set)
// - If EQ
// - If GT, and EQ does not satisfy GT, return true (null set)
// - If LT, and EQ does not satisfy LT, return true (null set)
// - If EQ satisfies every C, return true
// - Else return false
// - If GT
// - If GT is lower than any > or >= comp in C, return false
// - If GT is >=, and GT.semver does not satisfy every C, return false
// - If LT
// - If LT.semver is greater than that of any > comp in C, return false
// - If LT is <=, and LT.semver does not satisfy every C, return false
// - If any C is a = range, and GT or LT are set, return false
// - Else return true
2020-09-02 10:07:11 +02:00
2020-10-19 21:17:06 +02:00
// A complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff
// every non-null simple range on the left is a subset of some simple
// range on the right (see the algorithm outline above).
const subset = (sub, dom, options) => {
  sub = new Range(sub, options)
  dom = new Range(dom, options)
  let sawNonNull = false

  OUTER: for (const simpleSub of sub.set) {
    for (const simpleDom of dom.set) {
      const isSub = simpleSubset(simpleSub, simpleDom, options)
      sawNonNull = sawNonNull || isSub !== null
      if (isSub)
        continue OUTER
    }
    // the null set is a subset of everything, but null simple ranges in
    // a complex range should be ignored. so if we saw a non-null range,
    // then we know this isn't a subset, but if EVERY simple range was null,
    // then it is a subset.
    if (sawNonNull)
      return false
  }
  return true
}
2020-08-23 03:31:38 +02:00
2020-10-19 21:17:06 +02:00
// Decides whether simple range `sub` (a list of comparators) is a subset
// of simple range `dom`. Returns true/false, or null to mean "sub is the
// null set" (vacuously a subset; callers may ignore it).
const simpleSubset = (sub, dom, options) => {
  // ANY-only sub is a subset only of an ANY-only dom.
  if (sub.length === 1 && sub[0].semver === ANY)
    return dom.length === 1 && dom[0].semver === ANY

  // Partition sub into its highest lower bound (gt), lowest upper bound
  // (lt), and the set of exact (=) versions.
  const eqSet = new Set()
  let gt, lt
  for (const c of sub) {
    if (c.operator === '>' || c.operator === '>=')
      gt = higherGT(gt, c, options)
    else if (c.operator === '<' || c.operator === '<=')
      lt = lowerLT(lt, c, options)
    else
      eqSet.add(c.semver)
  }

  // Two distinct exact versions can never both match: null set.
  if (eqSet.size > 1)
    return null

  let gtltComp
  if (gt && lt) {
    gtltComp = compare(gt.semver, lt.semver, options)
    if (gtltComp > 0)
      return null
    else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
      return null
  }

  // will iterate one or zero times
  for (const eq of eqSet) {
    if (gt && !satisfies(eq, String(gt), options))
      return null
    if (lt && !satisfies(eq, String(lt), options))
      return null
    for (const c of dom) {
      if (!satisfies(eq, String(c), options))
        return false
    }
    return true
  }

  let higher, lower
  let hasDomLT, hasDomGT
  for (const c of dom) {
    hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
    hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
    if (gt) {
      if (c.operator === '>' || c.operator === '>=') {
        higher = higherGT(gt, c, options)
        if (higher === c)
          return false
      } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
        return false
    }
    if (lt) {
      if (c.operator === '<' || c.operator === '<=') {
        lower = lowerLT(lt, c, options)
        if (lower === c)
          return false
      } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
        return false
    }
    // An exact comparator in dom cannot contain an open-ended sub
    // (unless gt/lt pinned sub down to a single version, gtltComp === 0).
    if (!c.operator && (lt || gt) && gtltComp !== 0)
      return false
  }

  // if there was a < or >, and nothing in the dom, then must be false
  // UNLESS it was limited by another range in the other direction.
  // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
  if (gt && hasDomLT && !lt && gtltComp !== 0)
    return false
  if (lt && hasDomGT && !gt && gtltComp !== 0)
    return false
  return true
}
2020-08-23 03:31:38 +02:00
2020-10-19 21:17:06 +02:00
// Returns the stricter (higher) of two lower-bound comparators.
// >=1.2.3 is lower than >1.2.3; a missing `a` yields `b`.
const higherGT = (a, b, options) => {
  if (!a)
    return b
  const comp = compare(a.semver, b.semver, options)
  return comp > 0 ? a
    : comp < 0 ? b
    : b.operator === '>' && a.operator === '>=' ? b
    : a
}
2020-08-23 03:31:38 +02:00
2020-10-19 21:17:06 +02:00
// Returns the stricter (lower) of two upper-bound comparators.
// <=1.2.3 is higher than <1.2.3; a missing `a` yields `b`.
const lowerLT = (a, b, options) => {
  if (!a)
    return b
  const comp = compare(a.semver, b.semver, options)
  return comp < 0 ? a
    : comp > 0 ? b
    : b.operator === '<' && a.operator === '<=' ? b
    : a
}
2020-08-23 03:31:38 +02:00
2020-10-19 21:17:06 +02:00
module . exports = subset
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 866 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const SemVer = _ _webpack _require _ _ ( 88 )
const patch = ( a , loose ) => new SemVer ( a , loose ) . patch
module . exports = patch
2020-08-23 03:31:38 +02:00
2020-10-23 18:21:44 +02:00
/***/ } ) ,
/***/ 877 :
/***/ ( function ( module ) {
module . exports = eval ( "require" ) ( "encoding" ) ;
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 886 :
/***/ ( function ( module ) {
2020-08-21 13:39:42 +02:00
"use strict" ;
2020-09-02 10:07:11 +02:00
/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */
2020-09-02 10:07:11 +02:00
// True only for values whose internal [[Class]] tag is exactly
// "[object Object]" (plain objects; excludes arrays, null, primitives).
function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}
2020-09-02 10:07:11 +02:00
// Returns true when `o` is a plain object: created by the Object
// constructor or with a null prototype — not a class instance, array,
// or other exotic object.
function isPlainObject(o) {
  var ctor, prot;
  if (isObject(o) === false) return false;
  // If has modified constructor
  ctor = o.constructor;
  if (ctor === undefined) return true;
  // If has modified prototype
  prot = ctor.prototype;
  if (isObject(prot) === false) return false;
  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }
  // Most likely a plain Object
  return true;
}
2020-09-02 10:07:11 +02:00
module . exports = isPlainObject ;
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 891 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( module ) {
2020-09-02 10:07:11 +02:00
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 898 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const compare = _ _webpack _require _ _ ( 309 )
const eq = ( a , b , loose ) => compare ( a , b , loose ) === 0
module . exports = eq
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 900 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const SemVer = _ _webpack _require _ _ ( 88 )
// Increments `version` by the given release type, returning the new
// version string or null for invalid input (never throws).
// Supports the historical overload inc(version, release, identifier)
// where the third argument is the prerelease identifier.
const inc = (version, release, options, identifier) => {
  if (typeof (options) === 'string') {
    identifier = options
    options = undefined
  }
  try {
    return new SemVer(version, options).inc(release, identifier).version
  } catch (er) {
    // Unparseable versions yield null rather than propagating the error.
    return null
  }
}
2020-09-02 10:07:11 +02:00
module . exports = inc
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 914 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// TypeScript-emitted module-interop helpers (generated code; do not edit).
// __createBinding: re-exports property `k` of module `m` on `o` (as `k2`),
// using an accessor when Object.create is available so the binding stays live.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches the CommonJS namespace as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
// __importStar: wraps a CommonJS module so `import * as ns` sees all own
// enumerable properties plus a `default` binding; ES modules pass through.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . getApiBaseUrl = exports . getProxyAgent = exports . getAuthString = void 0 ;
const httpClient = _ _importStar ( _ _webpack _require _ _ ( 936 ) ) ;
/**
 * Resolves the Authorization value for Octokit: exactly one of `token`
 * or `options.auth` must be provided.
 * @param token personal access / installation token (may be empty)
 * @param options Octokit options; `auth` may be a string or a strategy
 * @returns {string} `options.auth` when it is a string, else `token <token>`
 * @throws {Error} when neither or both of token / options.auth are given
 */
function getAuthString(token, options) {
    if (!token && !options.auth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    else if (token && options.auth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    // Fixed: template literal was whitespace-corrupted (" token  tok ").
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports . getAuthString = getAuthString ;
// Returns an http(s) Agent for `destinationUrl`, honoring the proxy
// environment variables via @actions/http-client.
function getProxyAgent(destinationUrl) {
    const client = new httpClient.HttpClient();
    return client.getAgent(destinationUrl);
}
exports . getProxyAgent = getProxyAgent ;
// GitHub API base URL: GITHUB_API_URL is set by the runner (and differs
// on GitHub Enterprise); fall back to the public endpoint.
function getApiBaseUrl() {
    const fromEnv = process.env['GITHUB_API_URL'];
    return fromEnv || 'https://api.github.com';
}
exports . getApiBaseUrl = getApiBaseUrl ;
//# sourceMappingURL=utils.js.map
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 925 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const { MAX _LENGTH } = _ _webpack _require _ _ ( 293 )
const { re , t } = _ _webpack _require _ _ ( 523 )
2020-08-23 03:31:38 +02:00
const SemVer = _ _webpack _require _ _ ( 88 )
2020-09-02 10:07:11 +02:00
// Parses `version` into a SemVer instance, or returns null when it is not
// a valid semver (never throws). SemVer instances pass through unchanged.
const parse = (version, options) => {
  // Normalize options: a bare truthy value historically meant `loose`.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  // Guard against pathological inputs before running the regex.
  if (version.length > MAX_LENGTH) {
    return null
  }

  const r = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
2020-09-02 10:07:11 +02:00
module . exports = parse
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 932 :
/***/ ( function ( _ _unusedmodule , exports ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/**
 * Error subclass used to signal use of a deprecated API.
 * Instances have name === 'Deprecation'.
 */
class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
exports . Deprecation = Deprecation ;
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 936 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-02 10:07:11 +02:00
const url = _ _webpack _require _ _ ( 835 ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 443 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
2020-08-21 13:39:42 +02:00
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
2020-09-02 10:07:11 +02:00
// Resolves the proxy URL for `serverUrl` from the environment;
// returns '' when no proxy applies to that URL.
function getProxyUrl(serverUrl) {
    let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
2020-09-02 10:07:11 +02:00
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
/**
 * Thin wrapper over the raw response stream (Node http.IncomingMessage).
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the entire response body and resolves it as a UTF-8 string.
     * NOTE(review): stream 'error' events are not handled here — if 'end'
     * never fires the promise never settles; confirm callers rely on
     * request-level timeouts.
     * @returns {Promise<string>} the full body
     */
    readBody() {
        return new Promise(async (resolve, reject) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
2020-09-02 10:07:11 +02:00
exports . HttpClientResponse = HttpClientResponse ;
// True when the request URL uses the HTTPS scheme.
function isHttps(requestUrl) {
    const parsed = url.parse(requestUrl);
    return parsed.protocol === 'https:';
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
2020-08-21 13:39:42 +02:00
}
}
2020-09-02 10:07:11 +02:00
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
2020-08-21 13:39:42 +02:00
}
else {
2020-09-02 10:07:11 +02:00
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
2020-08-21 13:39:42 +02:00
}
}
2020-09-02 10:07:11 +02:00
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
2020-08-21 13:39:42 +02:00
}
}
2020-09-02 10:07:11 +02:00
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof data === 'string' ) {
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _webpack _require _ _ ( 294 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : proxyUrl . auth ,
host : proxyUrl . hostname ,
port : proxyUrl . port
}
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
2020-08-21 13:39:42 +02:00
}
else {
2020-09-02 10:07:11 +02:00
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
response . headers = res . message . headers ;
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = 'Failed request: (' + statusCode + ')' ;
}
let err = new Error ( msg ) ;
// attach statusCode and body obj (if available) to the error object
err [ 'statusCode' ] = statusCode ;
if ( response . result ) {
err [ 'result' ] = response . result ;
}
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
exports . HttpClient = HttpClient ;
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 940 :
/***/ ( function ( module ) {
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
// Wrap a callback-wrapping function so own properties are retained:
// decorations on `fn` are copied to the wrapper, and decorations on the
// callback are copied to whatever the wrapper returns.
function wrappy (fn, cb) {
  // two-argument form: wrap immediately
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function') {
    throw new TypeError('need wrapper function')
  }

  // carry decorations from fn over to the wrapper
  for (var key of Object.keys(fn)) {
    wrapper[key] = fn[key]
  }

  return wrapper

  function wrapper () {
    var args = Array.prototype.slice.call(arguments)
    var ret = fn.apply(this, args)
    // if the wrapper produced a new callback, keep the original
    // callback's decorations on it
    var last = args[args.length - 1]
    if (typeof ret === 'function' && ret !== last) {
      for (var key of Object.keys(last)) {
        ret[key] = last[key]
      }
    }
    return ret
  }
}
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
2020-10-20 15:18:02 +02:00
/***/ } ) ,
/***/ 942 :
/***/ ( function ( module ) {
// Growable byte buffer: capacity doubles whenever it fills up.
class ResizeableBuffer {
  constructor (size = 100) {
    this.size = size               // current capacity in bytes
    this.length = 0                // bytes actually in use
    this.buf = Buffer.alloc(size)
  }
  // Insert one byte at the front, shifting existing content right by one.
  prepend (val) {
    const used = this.length++
    if (used === this.size) {
      this.resize()
    }
    const snapshot = this.clone()
    this.buf[0] = val
    snapshot.copy(this.buf, 1, 0, used)
  }
  // Append one byte at the end.
  append (val) {
    const used = this.length++
    if (used === this.size) {
      this.resize()
    }
    this.buf[used] = val
  }
  // Independent copy of the used portion.
  clone () {
    return Buffer.from(this.buf.slice(0, this.length))
  }
  // Double the capacity, preserving current content.
  resize () {
    const used = this.length
    this.size = this.size * 2
    const next = Buffer.alloc(this.size)
    this.buf.copy(next, 0, 0, used)
    this.buf = next
  }
  toString () {
    return this.buf.slice(0, this.length).toString()
  }
  toJSON () {
    return this.toString()
  }
  // Logical clear; capacity is kept.
  reset () {
    this.length = 0
  }
}
module . exports = ResizeableBuffer
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 957 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module . exports = glob
var fs = _ _webpack _require _ _ ( 747 )
2020-10-19 21:17:06 +02:00
var rp = _ _webpack _require _ _ ( 290 )
2020-09-02 10:07:11 +02:00
var minimatch = _ _webpack _require _ _ ( 973 )
var Minimatch = minimatch . Minimatch
var inherits = _ _webpack _require _ _ ( 124 )
var EE = _ _webpack _require _ _ ( 614 ) . EventEmitter
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 714 )
var globSync = _ _webpack _require _ _ ( 10 )
var common = _ _webpack _require _ _ ( 625 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var inflight = _ _webpack _require _ _ ( 492 )
var util = _ _webpack _require _ _ ( 669 )
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
var once = _ _webpack _require _ _ ( 223 )
// Public async entry point. With {sync: true} delegates to globSync,
// in which case passing a callback is an error.
function glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = {}
  }
  if (!options) options = {}

  if (options.sync) {
    if (cb) throw new TypeError('callback provided to sync glob')
    return globSync(pattern, options)
  }

  return new Glob(pattern, options, cb)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
glob . sync = globSync
var GlobSync = glob . GlobSync = globSync . GlobSync
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// old api surface
glob . glob = glob
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Shallow-copy own enumerable keys of `add` onto `origin` and return
// `origin`. Non-object (or null) `add` is ignored.
function extend (origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin
  }

  for (var key of Object.keys(add)) {
    origin[key] = add[key]
  }
  return origin
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// True when the pattern contains glob magic: a multi-branch set
// (braces) or any non-string (regexp) part.
glob.hasMagic = function (pattern, options_) {
  var options = extend({}, options_)
  options.noprocess = true

  var g = new Glob(pattern, options)
  var set = g.minimatch.set

  if (!pattern) return false

  if (set.length > 1) return true

  return set[0].some(function (part) {
    return typeof part !== 'string'
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
glob.Glob = Glob
inherits(Glob, EE)
// EventEmitter-based async glob walker. Emits 'match' per result,
// 'end' with all matches, 'error', 'abort'.
function Glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = null
  }

  // {sync: true} routes to the synchronous implementation
  if (options && options.sync) {
    if (cb) throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  if (!(this instanceof Glob)) return new Glob(pattern, options, cb)

  setopts(this, pattern, options)
  // NOTE(review): capital-P `_didRealPath` here vs `_didRealpath` used by
  // _finish/_realpath — preserved as-is from the original.
  this._didRealPath = false

  // process each pattern in the minimatch set
  var patternCount = this.minimatch.set.length

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(patternCount)

  if (typeof cb === 'function') {
    cb = once(cb)
    this.on('error', cb)
    this.on('end', function (matches) {
      cb(null, matches)
    })
  }

  var self = this
  this._processing = 0

  this._emitQueue = []
  this._processQueue = []
  this.paused = false

  if (this.noprocess) return this

  if (patternCount === 0) return done()

  // `sync` guards against _finish firing before the constructor returns
  var sync = true
  for (var i = 0; i < patternCount; i++) {
    this._process(this.minimatch.set[i], i, false, done)
  }
  sync = false

  function done () {
    --self._processing
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish()
        })
      } else {
        self._finish()
      }
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Finalize: optionally resolve realpaths first (which re-enters here),
// then let common.finish assemble this.found and emit 'end'.
Glob.prototype._finish = function () {
  assert(this instanceof Glob)
  if (this.aborted) return

  if (this.realpath && !this._didRealpath) return this._realpath()

  common.finish(this)
  this.emit('end', this.found)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Resolve every match set through fs.realpath exactly once, then
// re-run _finish when all sets are done.
Glob.prototype._realpath = function () {
  if (this._didRealpath) return
  this._didRealpath = true

  var pending = this.matches.length
  if (pending === 0) return this._finish()

  var self = this
  for (var index = 0; index < this.matches.length; index++) {
    this._realpathSet(index, next)
  }

  function next () {
    if (--pending === 0) self._finish()
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Rebuild one match set keyed by realpath. If the stat inside realpath
// fails (broken link in the chain), fall back to the absolute path.
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index]
  if (!matchset) return cb()

  var found = Object.keys(matchset)
  var self = this
  var pending = found.length
  if (pending === 0) return cb()

  var resolved = this.matches[index] = Object.create(null)
  found.forEach(function (p) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved. just return the abs value in that case.
    p = self._makeAbs(p)
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er) resolved[real] = true
      else if (er.syscall === 'stat') resolved[p] = true
      else self.emit('error', er) // srsly wtf right here
      if (--pending === 0) {
        self.matches[index] = resolved
        cb()
      }
    })
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Glob . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Glob . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Glob . prototype . abort = function ( ) {
this . aborted = true
this . emit ( 'abort' )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Suspend emitting matches and processing patterns; work arriving while
// paused is queued and flushed by resume().
Glob.prototype.pause = function () {
  if (this.paused) return
  this.paused = true
  this.emit('pause')
}
// Un-pause: flush matches queued by _emitMatch, then replay deferred
// _process calls (undoing the _processing++ each made before deferring).
Glob.prototype.resume = function () {
  if (!this.paused) return

  this.emit('resume')
  this.paused = false

  if (this._emitQueue.length) {
    var eq = this._emitQueue.slice(0)
    this._emitQueue.length = 0
    for (var i = 0; i < eq.length; i++) {
      this._emitMatch(eq[i][0], eq[i][1])
    }
  }

  if (this._processQueue.length) {
    var pq = this._processQueue.slice(0)
    this._processQueue.length = 0
    for (var j = 0; j < pq.length; j++) {
      var job = pq[j]
      this._processing--
      this._process(job[0], job[1], job[2], job[3])
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Core dispatcher: split the pattern into a literal string prefix and a
// magic remainder, then route to simple / globstar / readdir handling.
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob)
  assert(typeof cb === 'function')

  if (this.aborted) return

  this._processing++
  if (this.paused) {
    // defer until resume()
    this._processQueue.push([pattern, index, inGlobStar, cb])
    return
  }

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n++
  }
  // now n is the index of the first one that is *not* a string

  var prefix
  switch (n) {
    // whole pattern is literal: simple stat-based check
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // figure out which directory to read
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix)) prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  // if ignored, skip processing
  if (childrenIgnored(this, read)) return cb()

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Glob . prototype . _processReaddir = function ( prefix , read , abs , remain , index , inGlobStar , cb ) {
var self = this
this . _readdir ( abs , inGlobStar , function ( er , entries ) {
return self . _processReaddir2 ( prefix , read , abs , remain , index , inGlobStar , entries , cb )
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Match directory entries against the next pattern part. Terminal parts
// are emitted directly; otherwise each match is substituted into the
// pattern and re-processed.
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  // if the abs isn't a dir, then nothing can match!
  if (!entries) return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set. Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !dotOk) continue
    var hit = (negate && !prefix) ? !e.match(pn) : e.match(pn)
    if (hit) matchedEntries.push(e)
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0) return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly. We know they exist, since readdir returned them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index]) this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i]
      if (prefix) {
        e = (prefix !== '/') ? prefix + '/' + e : prefix + e
      }
      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i]
    if (prefix) {
      e = (prefix !== '/') ? prefix + '/' + e : prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Record a match for pattern `index` and emit 'match' (and 'stat' when
// cached). Queued instead when paused; deduped per pattern.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted) return
  if (isIgnored(this, e)) return

  if (this.paused) {
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)
  if (this.mark) e = this._mark(e)
  if (this.absolute) e = abs

  // already emitted for this pattern
  if (this.matches[index][e]) return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c)) return
  }

  this.matches[index][e] = true

  var st = this.statCache[abs]
  if (st) this.emit('stat', e, st)

  this.emit('match', e)
}
2020-09-02 10:07:11 +02:00
// readdir for a ** expansion: lstat first so symlinked directories are
// detected (and not descended into again unless `follow` is set).
Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted) return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow) return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  var lstatcb = inflight(lstatkey, lstatcb_)

  // only the first in-flight caller actually issues the lstat
  if (lstatcb) fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT') return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else {
      self._readdir(abs, false, cb)
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Cached, deduplicated readdir. Globstar reads go through
// _readdirInGlobStar first so symlinks get classified.
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted) return

  // collapse concurrent readdirs of the same directory
  cb = inflight('readdir\0' + abs + '\0' + inGlobStar, cb)
  if (!cb) return

  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE') return cb()
    if (Array.isArray(c)) return cb(null, c)
  }

  fs.readdir(abs, readdirCb(this, abs, cb))
}
// Build an fs.readdir callback bound to a Glob instance: errors go to
// _readdirError, successful listings to _readdirEntries.
function readdirCb (self, abs, cb) {
  return function (er, entries) {
    if (er) self._readdirError(abs, er, cb)
    else self._readdirEntries(abs, entries, cb)
  }
}
2020-09-02 10:07:11 +02:00
// Cache a successful directory listing. Without mark/stat, each child
// is cached as `true` (exists) to avoid a second stat later.
Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted) return

  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i++) {
      var child = (abs === '/') ? abs + entries[i] : abs + '/' + entries[i]
      this.cache[child] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}
// Handle and cache a readdir failure; only an invalid cwd or a strict-
// mode error is fatal.
Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted) return

  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error. Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort; if not, we threw out of here
        this.abort()
      }
      if (!this.silent) console.error('glob error', er)
      break
  }

  return cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Globstar branch: read the directory, then expand ** against its
// entries in _processGlobStar2.
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}
2020-09-02 10:07:11 +02:00
// Expand ** against a directory listing: try the pattern with the
// globstar pruned out, and with every non-dot child both replacing and
// sitting below the globstar.
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries) return cb()

  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [prefix] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar) return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot) continue

    // these two cases enter the inGlobStar state
    this._process(gspref.concat(e, remainWithoutGlobStar), index, true, cb)
    this._process(gspref.concat(e, remain), index, true, cb)
  }

  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Glob . prototype . _processSimple = function ( prefix , index , cb ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this . _stat ( prefix , function ( er , exists ) {
self . _processSimple2 ( prefix , index , er , exists , cb )
} )
}
// Finish a magic-free pattern: mount absolute paths onto this.root,
// normalize Windows separators, and emit the match if it exists.
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
  if (!this.matches[index]) this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists) return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      // path.resolve strips a trailing slash; restore it
      if (trail) prefix += '/'
    }
  }

  if (process.platform === 'win32') prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Returns either 'DIR', 'FILE', or false
// Consults this.cache / this.statCache before hitting the filesystem;
// lstats first so symlinks can be resolved to their targets (a broken
// symlink is treated as a file). Results are delivered to cb, never returned.
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  // trailing slash means the caller requires a directory
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    // a readdir listing was cached: it's a directory
    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  // inflight() dedupes concurrent stats of the same path: only the first
  // caller gets a callback to run; the rest are queued onto it.
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Record the outcome of a stat/lstat in the caches and report
// 'DIR' | 'FILE' | true | false to cb. ENOENT/ENOTDIR are cached as
// a definite "does not exist" (false).
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false
    return cb()
  }

  var needDir = f.slice(-1) === '/'
  this.statCache[abs] = stat

  // a trailing-slash path that resolved to a non-directory is not a match
  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'
  // don't clobber a richer cached value (e.g. a readdir listing array)
  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
}
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 959 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const assert = _ _webpack _require _ _ ( 357 )
const path = _ _webpack _require _ _ ( 622 )
const fs = _ _webpack _require _ _ ( 747 )
let glob = undefined
try {
glob = _ _webpack _require _ _ ( 957 )
} catch ( _err ) {
// treat glob as optional.
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const defaultGlobOpts = {
nosort : true ,
silent : true
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// for EMFILE handling
let timeout = 0
const isWindows = ( process . platform === "win32" )
const defaults = options => {
const methods = [
'unlink' ,
'chmod' ,
'stat' ,
'lstat' ,
'rmdir' ,
'readdir'
]
methods . forEach ( m => {
options [ m ] = options [ m ] || fs [ m ]
m = m + 'Sync'
options [ m ] = options [ m ] || fs [ m ]
} )
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
options . maxBusyTries = options . maxBusyTries || 3
options . emfileWait = options . emfileWait || 1000
if ( options . glob === false ) {
options . disableGlob = true
}
if ( options . disableGlob !== true && glob === undefined ) {
throw Error ( 'glob dependency not found, set `options.disableGlob = true` if intentional' )
}
options . disableGlob = options . disableGlob || false
options . glob = options . glob || defaultGlobOpts
}
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
const rimraf = ( p , options , cb ) => {
if ( typeof options === 'function' ) {
cb = options
options = { }
}
2020-08-16 00:36:41 +02:00
2020-09-02 10:07:11 +02:00
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert . equal ( typeof cb , 'function' , 'rimraf: callback function required' )
assert ( options , 'rimraf: invalid options argument provided' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
defaults ( options )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
let busyTries = 0
let errState = null
let n = 0
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const next = ( er ) => {
errState = errState || er
if ( -- n === 0 )
cb ( errState )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const afterGlob = ( er , results ) => {
if ( er )
return cb ( er )
n = results . length
if ( n === 0 )
return cb ( )
results . forEach ( p => {
const CB = ( er ) => {
if ( er ) {
if ( ( er . code === "EBUSY" || er . code === "ENOTEMPTY" || er . code === "EPERM" ) &&
busyTries < options . maxBusyTries ) {
busyTries ++
// try again, with the same exact callback as this one.
return setTimeout ( ( ) => rimraf _ ( p , options , CB ) , busyTries * 100 )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// this one won't happen if graceful-fs is used.
if ( er . code === "EMFILE" && timeout < options . emfileWait ) {
return setTimeout ( ( ) => rimraf _ ( p , options , CB ) , timeout ++ )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// already gone
if ( er . code === "ENOENT" ) er = null
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
timeout = 0
next ( er )
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
rimraf _ ( p , options , CB )
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( options . disableGlob || ! glob . hasMagic ( p ) )
return afterGlob ( null , [ p ] )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
options . lstat ( p , ( er , stat ) => {
if ( ! er )
return afterGlob ( null , [ p ] )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
glob ( p , options . glob , afterGlob )
} )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that a the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
const rimraf _ = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
options . lstat ( p , ( er , st ) => {
if ( er && er . code === "ENOENT" )
return cb ( null )
// Windows can EPERM on stat. Life is suffering.
if ( er && er . code === "EPERM" && isWindows )
fixWinEPERM ( p , options , er , cb )
if ( st && st . isDirectory ( ) )
return rmdir ( p , options , er , cb )
options . unlink ( p , er => {
if ( er ) {
if ( er . code === "ENOENT" )
return cb ( null )
if ( er . code === "EPERM" )
return ( isWindows )
? fixWinEPERM ( p , options , er , cb )
: rmdir ( p , options , er , cb )
if ( er . code === "EISDIR" )
return rmdir ( p , options , er , cb )
}
return cb ( er )
} )
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const fixWinEPERM = ( p , options , er , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
options . chmod ( p , 0o666 , er2 => {
if ( er2 )
cb ( er2 . code === "ENOENT" ? null : er )
else
options . stat ( p , ( er3 , stats ) => {
if ( er3 )
cb ( er3 . code === "ENOENT" ? null : er )
else if ( stats . isDirectory ( ) )
rmdir ( p , options , er , cb )
else
options . unlink ( p , cb )
2020-08-23 03:31:38 +02:00
} )
2020-09-02 10:07:11 +02:00
} )
}
const fixWinEPERMSync = ( p , options , er ) => {
assert ( p )
assert ( options )
try {
options . chmodSync ( p , 0o666 )
} catch ( er2 ) {
if ( er2 . code === "ENOENT" )
return
else
throw er
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
let stats
try {
stats = options . statSync ( p )
} catch ( er3 ) {
if ( er3 . code === "ENOENT" )
return
else
throw er
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
if ( stats . isDirectory ( ) )
rmdirSync ( p , options , er )
else
options . unlinkSync ( p )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const rmdir = ( p , options , originalEr , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
options . rmdir ( p , er => {
if ( er && ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" ) )
rmkids ( p , options , cb )
else if ( er && er . code === "ENOTDIR" )
cb ( originalEr )
else
cb ( er )
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const rmkids = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
options . readdir ( p , ( er , files ) => {
if ( er )
return cb ( er )
let n = files . length
if ( n === 0 )
return options . rmdir ( p , cb )
let errState
files . forEach ( f => {
rimraf ( path . join ( p , f ) , options , er => {
if ( errState )
return
if ( er )
return cb ( errState = er )
if ( -- n === 0 )
options . rmdir ( p , cb )
} )
} )
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
const rimrafSync = ( p , options ) => {
options = options || { }
defaults ( options )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert ( options , 'rimraf: missing options' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
let results
if ( options . disableGlob || ! glob . hasMagic ( p ) ) {
results = [ p ]
} else {
try {
options . lstatSync ( p )
results = [ p ]
} catch ( er ) {
results = glob . sync ( p , options . glob )
}
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
if ( ! results . length )
return
for ( let i = 0 ; i < results . length ; i ++ ) {
const p = results [ i ]
let st
try {
st = options . lstatSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
// Windows can EPERM on stat. Life is suffering.
if ( er . code === "EPERM" && isWindows )
fixWinEPERMSync ( p , options , er )
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
try {
// sunos lets the root user unlink directories, which is... weird.
if ( st && st . isDirectory ( ) )
rmdirSync ( p , options , null )
else
options . unlinkSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "EPERM" )
return isWindows ? fixWinEPERMSync ( p , options , er ) : rmdirSync ( p , options , er )
if ( er . code !== "EISDIR" )
throw er
rmdirSync ( p , options , er )
}
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const rmdirSync = ( p , options , originalEr ) => {
assert ( p )
assert ( options )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
try {
options . rmdirSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "ENOTDIR" )
throw originalEr
if ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" )
rmkidsSync ( p , options )
}
}
const rmkidsSync = ( p , options ) => {
assert ( p )
assert ( options )
options . readdirSync ( p ) . forEach ( f => rimrafSync ( path . join ( p , f ) , options ) )
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
const retries = isWindows ? 100 : 1
let i = 0
do {
let threw = true
try {
const ret = options . rmdirSync ( p , options )
threw = false
return ret
} finally {
if ( ++ i < retries && threw )
continue
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
} while ( true )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
module . exports = rimraf
rimraf . sync = rimrafSync
/***/ } ) ,
/***/ 962 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert _1 = _ _webpack _require _ _ ( 357 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/ * *
* On OSX / Linux , true if path starts with '/' . On Windows , true for paths like :
* \ , \ hello , \ \ hello \ share , C : , and C : \ hello ( and corresponding alternate separator cases ) .
* /
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
/ * *
* Recursively create a directory at ` fsPath ` .
*
* This implementation is optimistic , meaning it attempts to create the full
* path first , and backs up the path stack from there .
*
* @ param fsPath The path to create
* @ param maxDepth The maximum recursion depth
* @ param depth The current recursion depth
* /
function mkdirP ( fsPath , maxDepth = 1000 , depth = 1 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
fsPath = path . resolve ( fsPath ) ;
if ( depth >= maxDepth )
return exports . mkdir ( fsPath ) ;
try {
yield exports . mkdir ( fsPath ) ;
return ;
}
catch ( err ) {
switch ( err . code ) {
case 'ENOENT' : {
yield mkdirP ( path . dirname ( fsPath ) , maxDepth , depth + 1 ) ;
yield exports . mkdir ( fsPath ) ;
return ;
}
default : {
let stats ;
try {
stats = yield exports . stat ( fsPath ) ;
}
catch ( err2 ) {
throw err ;
}
if ( ! stats . isDirectory ( ) )
throw err ;
}
}
}
} ) ;
}
exports . mkdirP = mkdirP ;
/ * *
* Best effort attempt to determine whether a file exists and is executable .
* @ param filePath file path to check
* @ param extensions additional file extensions to try
* @ return if file exists and is executable , returns the file path . otherwise empty string .
* /
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
//# sourceMappingURL=io-util.js.map
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 973 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// minimatch: glob pattern matching. Module-level setup: exports, regex
// building blocks, and the extglob open/close fragments.
module.exports = minimatch
minimatch.Minimatch = Minimatch

var path = { sep: '/' }
try {
  path = __webpack_require__(622)
} catch (er) {}

var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __webpack_require__(717)

// regexp open/close fragments for each extglob pattern-list type
var plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)' },
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
}

// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'

// * => any number of characters
var star = qmark + '*?'

// ** when dots are allowed.  Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'

// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!')

// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
  return s.split('').reduce(function (set, c) {
    set[c] = true
    return set
  }, {})
}

// normalizes slashes.
var slashSplit = /\/+/
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
minimatch.filter = filter
// Returns a predicate suitable for Array.prototype.filter that matches
// each element against `pattern`.
function filter (pattern, options) {
  options = options || {}
  return function (p, i, list) {
    return minimatch(p, pattern, options)
  }
}
2020-09-02 10:07:11 +02:00
// Shallow-merge two option objects into a fresh object; keys from `a`
// take precedence over keys from `b`. Neither input is mutated.
function ext (a, b) {
  a = a || {}
  b = b || {}
  var t = {}
  Object.keys(b).forEach(function (k) {
    t[k] = b[k]
  })
  Object.keys(a).forEach(function (k) {
    t[k] = a[k]
  })
  return t
}
2020-09-02 10:07:11 +02:00
// Returns a minimatch-like function whose options always include `def`
// (caller-supplied options still win via ext()).
minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var orig = minimatch

  var m = function minimatch (p, pattern, options) {
    return orig.minimatch(p, pattern, ext(def, options))
  }

  m.Minimatch = function Minimatch (pattern, options) {
    return new orig.Minimatch(pattern, ext(def, options))
  }

  return m
}
2020-09-02 10:07:11 +02:00
// Class-level counterpart of minimatch.defaults: a Minimatch constructor
// pre-seeded with `def` options.
Minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return Minimatch
  return minimatch.defaults(def).Minimatch
}
2020-09-02 10:07:11 +02:00
// Test whether path `p` matches glob `pattern`.
// Comment patterns (leading '#') match nothing unless options.nocomment.
function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  // "" only matches ""
  if (pattern.trim() === '') return p === ''

  return new Minimatch(pattern, options).match(p)
}
2020-09-02 10:07:11 +02:00
// Minimatch constructor: normalizes the pattern and compiles it into a
// set of per-path-portion regexps via make(). Usable without `new`.
function Minimatch (pattern, options) {
  if (!(this instanceof Minimatch)) {
    return new Minimatch(pattern, options)
  }

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}
  pattern = pattern.trim()

  // windows support: need to use /, not \
  if (path.sep !== '/') {
    pattern = pattern.split(path.sep).join('/')
  }

  this.options = options
  this.set = []
  this.pattern = pattern
  this.regexp = null
  this.negate = false
  this.comment = false
  this.empty = false

  // make the set of regexps etc.
  this.make()
}
2020-09-02 10:07:11 +02:00
// no-op by default; replaced with console.error when options.debug is set
Minimatch.prototype.debug = function () {}

Minimatch.prototype.make = make
// Compile this.pattern into this.set: negation parsing, brace expansion,
// slash-splitting, then per-portion regexp translation via parse().
function make () {
  // don't do it more than once.
  if (this._made) return

  var pattern = this.pattern
  var options = this.options

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true
    return
  }
  if (!pattern) {
    this.empty = true
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate()

  // step 2: expand braces
  var set = this.globSet = this.braceExpand()

  if (options.debug) this.debug = console.error

  this.debug(this.pattern, set)

  // step 3: now we have a set, so turn each one into a series of path-portion
  // matching patterns.
  // These will be regexps, except in the case of "**", which is
  // set to the GLOBSTAR object for globstar behavior,
  // and will not contain any / characters
  set = this.globParts = set.map(function (s) {
    return s.split(slashSplit)
  })

  this.debug(this.pattern, set)

  // glob --> regexps
  set = set.map(function (s, si, set) {
    return s.map(this.parse, this)
  }, this)

  this.debug(this.pattern, set)

  // filter out everything that didn't compile properly.
  set = set.filter(function (s) {
    return s.indexOf(false) === -1
  })

  this.debug(this.pattern, set)

  this.set = set
}
2020-09-02 10:07:11 +02:00
Minimatch.prototype.parseNegate = parseNegate
// Strip leading '!' characters from this.pattern; an odd count sets
// this.negate. Disabled entirely by options.nonegate.
function parseNegate () {
  var pattern = this.pattern
  var negate = false
  var options = this.options
  var negateOffset = 0

  if (options.nonegate) return

  for (var i = 0, l = pattern.length
    ; i < l && pattern.charAt(i) === '!'
    ; i++) {
    negate = !negate
    negateOffset++
  }

  if (negateOffset) this.pattern = pattern.substr(negateOffset)
  this.negate = negate
}
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
  return braceExpand(pattern, options)
}
2020-09-02 10:07:11 +02:00
Minimatch.prototype.braceExpand = braceExpand
// Expand braces in `pattern` into an array of patterns, delegating to the
// brace-expansion module. Shortcuts to [pattern] when there is nothing to
// expand or options.nobrace is set.
function braceExpand (pattern, options) {
  if (!options) {
    if (this instanceof Minimatch) {
      options = this.options
    } else {
      options = {}
    }
  }

  pattern = typeof pattern === 'undefined'
    ? this.pattern : pattern

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  if (options.nobrace ||
    !pattern.match(/\{.*\}/)) {
    // shortcut. no need to expand.
    return [pattern]
  }

  return expand(pattern)
}
2020-09-02 10:07:11 +02:00
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
if ( pattern . length > 1024 * 64 ) {
throw new TypeError ( 'pattern is too long' )
}
var options = this . options
// shortcuts
if ( ! options . noglobstar && pattern === '**' ) return GLOBSTAR
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
case '/' :
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
2020-08-17 18:35:15 +02:00
}
2020-09-02 10:07:11 +02:00
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
if ( ! stateChar ) {
re += '\\('
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if ( inClass ) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '.' :
case '[' :
case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
// Convenience wrapper: compile a glob pattern straight to a RegExp
// without keeping the intermediate Minimatch instance around.
// (Removed an interleaved VCS-timestamp junk line that broke syntax.)
minimatch.makeRe = function (pattern, options) {
  return new Minimatch(pattern, options || {}).makeRe()
}
Minimatch.prototype.makeRe = makeRe
// Build (and cache on this.regexp) a single RegExp matching the whole
// pattern.  Returns false when the pattern can never match anything.
// (Removed an interleaved VCS-timestamp junk line that broke syntax.)
function makeRe () {
  // cached result: a RegExp, or false for "can't match"
  if (this.regexp || this.regexp === false) return this.regexp

  // at this point, this.set is a 2d array of partial
  // pattern strings, or "**".
  //
  // It's better to use .match().  This function shouldn't
  // be used, really, but it's pretty convenient sometimes,
  // when you just want to work with a regex.
  var set = this.set

  if (!set.length) {
    this.regexp = false
    return this.regexp
  }
  var options = this.options

  // what "**" expands to depends on the noglobstar/dot options
  var twoStar = options.noglobstar ? star
    : options.dot ? twoStarDot
    : twoStarNoDot
  var flags = options.nocase ? 'i' : ''

  // join each pattern's parts with an escaped /, then OR the patterns
  var re = set.map(function (pattern) {
    return pattern.map(function (p) {
      return (p === GLOBSTAR) ? twoStar
        : (typeof p === 'string') ? regExpEscape(p)
        : p._src
    }).join('\\\/')
  }).join('|')

  // must match entire pattern
  // ending in a * or ** will make it less strict.
  re = '^(?:' + re + ')$'

  // can match anything, as long as it's not this.
  if (this.negate) re = '^(?!' + re + ').*$'

  try {
    this.regexp = new RegExp(re, flags)
  } catch (ex) {
    // invalid regexp means the pattern can never match
    this.regexp = false
  }
  return this.regexp
}
// Filter `list` down to the entries matching `pattern`.
// With options.nonull, a pattern that matched nothing is returned
// as the sole result (shell-style "no match" behavior).
minimatch.match = function (list, pattern, options) {
  var mm = new Minimatch(pattern, options || {})
  var matched = list.filter(function (f) {
    return mm.match(f)
  })
  if (!matched.length && mm.options.nonull) {
    matched.push(pattern)
  }
  return matched
}
Minimatch.prototype.match = match
// Test a single path `f` against this pattern.  `partial` allows a
// prefix match (used while walking the filesystem).
// (Removed interleaved VCS-timestamp junk lines that broke syntax.)
function match (f, partial) {
  this.debug('match', f, this.pattern)
  // short-circuit in the case of busted things.
  // comments, etc.
  if (this.comment) return false
  if (this.empty) return f === ''

  if (f === '/' && partial) return true

  var options = this.options

  // windows: need to use /, not \
  if (path.sep !== '/') {
    f = f.split(path.sep).join('/')
  }

  // treat the test path as a set of pathparts.
  f = f.split(slashSplit)
  this.debug(this.pattern, 'split', f)

  // just ONE of the pattern sets in this.set needs to match
  // in order for it to be valid.  If negating, then just one
  // match means that we have failed.
  // Either way, return on the first hit.

  var set = this.set
  this.debug(this.pattern, 'set', set)

  // Find the basename of the path by looking for the last non-empty segment
  var filename
  var i
  for (i = f.length - 1; i >= 0; i--) {
    filename = f[i]
    if (filename) break
  }

  for (i = 0; i < set.length; i++) {
    var pattern = set[i]
    var file = f
    // matchBase: a one-part pattern matches against the basename only
    if (options.matchBase && pattern.length === 1) {
      file = [filename]
    }
    var hit = this.matchOne(file, pattern, partial)
    if (hit) {
      if (options.flipNegate) return true
      return !this.negate
    }
  }

  // didn't get any hits.  this is success if it's a negative
  // pattern, failure otherwise.
  if (options.flipNegate) return false
  return this.negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
// (Removed interleaved VCS-timestamp junk lines that broke syntax;
// the matching logic itself is unchanged.)
Minimatch.prototype.matchOne = function (file, pattern, partial) {
  var options = this.options

  this.debug('matchOne',
    { 'this': this, file: file, pattern: pattern })

  this.debug('matchOne', file.length, pattern.length)

  for (var fi = 0,
    pi = 0,
    fl = file.length,
    pl = pattern.length
    ; (fi < fl) && (pi < pl)
    ; fi++, pi++) {
    this.debug('matchOne loop')
    var p = pattern[pi]
    var f = file[fi]

    this.debug(pattern, p, f)

    // should be impossible.
    // some invalid regexp stuff in the set.
    if (p === false) return false

    if (p === GLOBSTAR) {
      this.debug('GLOBSTAR', [pattern, p, f])

      // "**"
      // a/**/b/**/c would match the following:
      // a/b/x/y/z/c
      // a/x/y/z/b/c
      // a/b/x/b/x/c
      // a/b/c
      // To do this, take the rest of the pattern after
      // the **, and see if it would match the file remainder.
      // If so, return success.
      // If not, the ** "swallows" a segment, and try again.
      // This is recursively awful.
      //
      // a/**/b/**/c matching a/b/x/y/z/c
      // - a matches a
      // - doublestar
      //   - matchOne(b/x/y/z/c, b/**/c)
      //     - b matches b
      //     - doublestar
      //       - matchOne(x/y/z/c, c) -> no
      //       - matchOne(y/z/c, c) -> no
      //       - matchOne(z/c, c) -> no
      //       - matchOne(c, c) yes, hit
      var fr = fi
      var pr = pi + 1
      if (pr === pl) {
        this.debug('** at the end')
        // a ** at the end will just swallow the rest.
        // We have found a match.
        // however, it will not swallow /.x, unless
        // options.dot is set.
        // . and .. are *never* matched by **, for explosively
        // exponential reasons.
        for (; fi < fl; fi++) {
          if (file[fi] === '.' || file[fi] === '..' ||
            (!options.dot && file[fi].charAt(0) === '.')) return false
        }
        return true
      }

      // ok, let's see if we can swallow whatever we can.
      while (fr < fl) {
        var swallowee = file[fr]

        this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)

        // XXX remove this slice.  Just pass the start index.
        if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
          this.debug('globstar found match!', fr, fl, swallowee)
          // found a match.
          return true
        } else {
          // can't swallow "." or ".." ever.
          // can only swallow ".foo" when explicitly asked.
          if (swallowee === '.' || swallowee === '..' ||
            (!options.dot && swallowee.charAt(0) === '.')) {
            this.debug('dot detected!', file, fr, pattern, pr)
            break
          }

          // ** swallows a segment, and continue.
          this.debug('globstar swallow a segment, and continue')
          fr++
        }
      }

      // no match was found.
      // However, in partial mode, we can't say this is necessarily over.
      // If there's more *pattern* left, then
      if (partial) {
        // ran out of file
        this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
        if (fr === fl) return true
      }
      return false
    }

    // something other than **
    // non-magic patterns just have to match exactly
    // patterns with magic have been turned into regexps.
    var hit
    if (typeof p === 'string') {
      if (options.nocase) {
        hit = f.toLowerCase() === p.toLowerCase()
      } else {
        hit = f === p
      }
      this.debug('string match', p, f, hit)
    } else {
      hit = f.match(p)
      this.debug('pattern match', p, f, hit)
    }

    if (!hit) return false
  }

  // Note: ending in / means that we'll get a final ""
  // at the end of the pattern.  This can only match a
  // corresponding "" at the end of the file.
  // If the file ends in /, then it can only match a
  // a pattern that ends in /, unless the pattern just
  // doesn't have any more for it. But, a/b/ should *not*
  // match "a/b/*", even though "" matches against the
  // [^/]*? pattern, except in partial mode, where it might
  // simply not be reached yet.
  // However, a/b/ should still satisfy a/*

  // now either we fell off the end of the pattern, or we're done.
  if (fi === fl && pi === pl) {
    // ran out of pattern and filename at the same time.
    // an exact hit!
    return true
  } else if (fi === fl) {
    // ran out of file, but still had pattern left.
    // this is ok if we're doing the match as part of
    // a glob fs traversal.
    return partial
  } else if (pi === pl) {
    // ran out of pattern, still have file left.
    // this is only acceptable if we're on the very last
    // empty segment of a file with a trailing slash.
    // a/* should match a/b/
    var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
    return emptyFileEnd
  }

  // should be unreachable.
  throw new Error('wtf?')
}
// Strip glob escapes: every backslash-escaped character becomes the
// character itself (e.g. "\*" -> "*").
function globUnescape (s) {
  return s.replace(/\\(.)/g, function (_, ch) {
    return ch
  })
}
// Escape every regexp metacharacter in `s` so it can be embedded in a
// RegExp source as a literal string.
// (Removed an interleaved VCS-timestamp junk line that broke syntax.)
function regExpEscape (s) {
  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
/***/ } )
/******/ } ) ;