
Merge branch 'main' into main

Commit 1326563738 by Rob Herley, 2024-01-11 15:19:28 -05:00, committed by GitHub
No known key found for this signature in database (GPG key ID: 4AEE18F83AFDEB23)
16 changed files with 878 additions and 1154 deletions


@@ -1,12 +1,12 @@
 ---
 name: "@actions/cache"
-version: 3.2.2
+version: 3.2.3
 type: npm
-summary:
-homepage:
+summary: Actions cache lib
+homepage: https://github.com/actions/toolkit/tree/main/packages/cache
 license: mit
 licenses:
 - sources: LICENSE.md
   text: |-
     The MIT License (MIT)


@@ -1,6 +1,6 @@
 ---
 name: "@azure/core-http"
-version: 3.0.0
+version: 3.0.4
 type: npm
 summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
   libraries generated using AutoRest


@@ -1,9 +1,10 @@
 ---
 name: "@azure/ms-rest-js"
-version: 2.6.1
+version: 2.7.0
 type: npm
-summary:
-homepage:
+summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
+  libraries generated using AutoRest
+homepage: https://github.com/Azure/ms-rest-js
 license: mit
 licenses:
 - sources: LICENSE


@@ -1,34 +0,0 @@
---
name: ip-regex
version: 2.1.0
type: npm
summary: Regular expression for matching IP addresses (IPv4 & IPv6)
homepage: https://github.com/sindresorhus/ip-regex#readme
license: mit
licenses:
- sources: license
text: |
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
- sources: readme.md
text: MIT © [Sindre Sorhus](https://sindresorhus.com)
notices: []


@@ -1,43 +0,0 @@
---
name: psl
version: 1.8.0
type: npm
summary: Domain name parser based on the Public Suffix List
homepage: https://github.com/lupomontero/psl#readme
license: mit
licenses:
- sources: LICENSE
text: |
The MIT License (MIT)
Copyright (c) 2017 Lupo Montero lupomontero@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
text: |-
The MIT License (MIT)
Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
notices: []


@@ -1,34 +0,0 @@
---
name: punycode
version: 2.1.1
type: npm
summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891,
and works on nearly all JavaScript platforms.
homepage: https://mths.be/punycode
license: mit
licenses:
- sources: LICENSE-MIT.txt
text: |
Copyright Mathias Bynens <https://mathiasbynens.be/>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
text: Punycode.js is available under the [MIT](https://mths.be/mit) license.
notices: []


@@ -1,9 +1,9 @@
 ---
 name: semver
-version: 6.3.0
+version: 6.3.1
 type: npm
 summary: The semantic version parser used by npm.
-homepage: https://github.com/npm/node-semver#readme
+homepage:
 license: isc
 licenses:
 - sources: LICENSE


@@ -1,23 +0,0 @@
---
name: tough-cookie
version: 3.0.1
type: npm
summary: RFC6265 Cookies and Cookie Jar for node.js
homepage: https://github.com/salesforce/tough-cookie
license: bsd-3-clause
licenses:
- sources: LICENSE
text: |
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notices: []


@@ -1,6 +1,6 @@
 ---
 name: xml2js
-version: 0.4.23
+version: 0.5.0
 type: npm
 summary: Simple XML to JavaScript object converter.
 homepage: https://github.com/Leonidas-from-XIV/node-xml2js


@@ -111,3 +111,8 @@
 ### 3.3.2
 - Fixes bug with Azure SDK causing blob downloads to get stuck.
+### 3.3.3
+- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
+- Additional audit fixes of npm package(s)
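
The mutation fix called out in the 3.3.3 entry amounts to copying the caller's path list before appending version components. A minimal before/after sketch (simplified: the real `getCacheVersion` in the bundles below hashes the joined components, and the `cachePaths` value here is hypothetical):

```js
// Before (@actions/cache 3.2.2): `components` aliases the caller's array,
// so appending the compression method also mutates the action's own input.
function getCacheVersionBefore(paths, compressionMethod) {
    const components = paths;
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    return components.join('|');
}

// After (@actions/cache 3.2.3): slice() takes a defensive copy,
// so the caller's array is left untouched ("don't pass changes upstream").
function getCacheVersionAfter(paths, compressionMethod) {
    const components = paths.slice();
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    return components.join('|');
}

const cachePaths = ['node_modules'];            // hypothetical input
getCacheVersionBefore(cachePaths, 'zstd');      // cachePaths is now ['node_modules', 'zstd']
getCacheVersionAfter(['node_modules'], 'zstd'); // input array stays ['node_modules']
```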


@ -1136,10 +1136,9 @@ function resolvePaths(patterns) {
implicitDescendants: false implicitDescendants: false
}); });
try { try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value; _c = _g.value;
_e = false; _e = false;
try {
const file = _c; const file = _c;
const relativeFile = path const relativeFile = path
.relative(workspace, file) .relative(workspace, file)
@ -1154,10 +1153,6 @@ function resolvePaths(patterns) {
paths.push(`${relativeFile}`); paths.push(`${relativeFile}`);
} }
} }
finally {
_e = true;
}
}
} }
catch (e_1_1) { e_1 = { error: e_1_1 }; } catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally { finally {
@ -3389,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths; // don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore // Add compression method to cache version to restore
// compressed cache as per compression method // compressed cache as per compression method
if (compressionMethod) { if (compressionMethod) {
@ -7276,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_COMPONENT_LENGTH = 16
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
// The actual regexps go on exports.re // The actual regexps go on exports.re
var re = exports.re = [] var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = [] var src = exports.src = []
var t = exports.tokens = {} var t = exports.tokens = {}
var R = 0 var R = 0
@ -7286,6 +7285,31 @@ function tok (n) {
t[n] = R++ t[n] = R++
} }
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings. // validating, and parsing SemVer version strings.
@ -7295,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER') tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE') tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER') tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -7344,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER') tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -7424,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])' '(?:$|[^\\d])'
tok('COERCERTL') tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"
@ -7433,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM') tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~' var tildeTrimReplace = '$1~'
tok('TILDE') tok('TILDE')
@ -7448,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM') tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^' var caretTrimReplace = '$1^'
tok('CARET') tok('CARET')
@ -7469,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
// this one has to use the /g flag // this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3' var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4` // Something like `1.2.3 - 1.2.4`
@ -7497,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i]) debug(i, src[i])
if (!re[i]) { if (!re[i]) {
re[i] = new RegExp(src[i]) re[i] = new RegExp(src[i])
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
} }
} }
@ -7521,7 +7557,7 @@ function parse (version, options) {
return null return null
} }
var r = options.loose ? re[t.LOOSE] : re[t.FULL] var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) { if (!r.test(version)) {
return null return null
} }
@ -7576,7 +7612,7 @@ function SemVer (version, options) {
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
if (!m) { if (!m) {
throw new TypeError('Invalid Version: ' + version) throw new TypeError('Invalid Version: ' + version)
@ -8021,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options) return new Comparator(comp, options)
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -8037,7 +8074,7 @@ function Comparator (comp, options) {
var ANY = {} var ANY = {}
Comparator.prototype.parse = function (comp) { Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r) var m = comp.match(r)
if (!m) { if (!m) {
@ -8161,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) { .trim()
.split(/\s+/)
.join(' ')
// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim()) return this.parseRange(range.trim())
}, this).filter(function (c) { }, this).filter(function (c) {
// throw out any that are not relevant for whatever reason // throw out any that are not relevant for whatever reason
@ -8171,7 +8215,7 @@ function Range (range, options) {
}) })
if (!this.set.length) { if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range) throw new TypeError('Invalid SemVer Range: ' + this.raw)
} }
this.format() this.format()
@ -8190,20 +8234,19 @@ Range.prototype.toString = function () {
Range.prototype.parseRange = function (range) { Range.prototype.parseRange = function (range) {
var loose = this.options.loose var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace) range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM]) debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
// normalize spaces // normalize spaces
range = range.split(/\s+/).join(' ') range = range.split(/\s+/).join(' ')
@ -8211,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) { var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options) return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/) }, this).join(' ').split(/\s+/)
@ -8311,7 +8354,7 @@ function replaceTildes (comp, options) {
} }
function replaceTilde (comp, options) { function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr) debug('tilde', comp, _, M, m, p, pr)
var ret var ret
@ -8352,7 +8395,7 @@ function replaceCarets (comp, options) {
function replaceCaret (comp, options) { function replaceCaret (comp, options) {
debug('caret', comp, options) debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr) debug('caret', comp, _, M, m, p, pr)
var ret var ret
@ -8411,7 +8454,7 @@ function replaceXRanges (comp, options) {
function replaceXRange (comp, options) { function replaceXRange (comp, options) {
comp = comp.trim() comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) { return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr) debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M) var xM = isX(M)
@ -8486,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) { function replaceStars (comp, options) {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp.trim().replace(safeRe[t.STAR], '')
} }
// This function is passed to string.replace(re[t.HYPHENRANGE]) // This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8812,7 +8855,7 @@ function coerce (version, options) {
var match = null var match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(safeRe[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
@ -8823,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
var next var next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 safeRe[t.COERCERTL].lastIndex = -1
} }
if (match === null) { if (match === null) {
@ -50262,14 +50305,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) { this.saxParser.onopentag = (function(_this) {
return function(node) { return function(node) {
var key, newValue, obj, processedKey, ref; var key, newValue, obj, processedKey, ref;
obj = {}; obj = Object.create(null);
obj[charkey] = ""; obj[charkey] = "";
if (!_this.options.ignoreAttrs) { if (!_this.options.ignoreAttrs) {
ref = node.attributes; ref = node.attributes;
for (key in ref) { for (key in ref) {
if (!hasProp.call(ref, key)) continue; if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) { if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {}; obj[attrkey] = Object.create(null);
} }
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50319,8 +50362,12 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
} }
if (isEmpty(obj)) { if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr; obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
} }
}
if (_this.options.validator != null) { if (_this.options.validator != null) {
xpath = "/" + ((function() { xpath = "/" + ((function() {
var i, len, results; var i, len, results;
@ -50343,7 +50390,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) { if (!_this.options.preserveChildrenOrder) {
node = {}; node = Object.create(null);
if (_this.options.attrkey in obj) { if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey]; node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey]; delete obj[_this.options.attrkey];
@ -50358,7 +50405,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node; obj = node;
} else if (s) { } else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || []; s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {}; objClone = Object.create(null);
for (key in obj) { for (key in obj) {
if (!hasProp.call(obj, key)) continue; if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key]; objClone[key] = obj[key];
@ -50375,7 +50422,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else { } else {
if (_this.options.explicitRoot) { if (_this.options.explicitRoot) {
old = obj; old = obj;
obj = {}; obj = Object.create(null);
obj[nodeName] = old; obj[nodeName] = old;
} }
_this.resultObject = obj; _this.resultObject = obj;
@ -52577,7 +52624,7 @@ var CacheFilename;
(function (CacheFilename) { (function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst"; CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); })(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod; var CompressionMethod;
(function (CompressionMethod) { (function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip"; CompressionMethod["Gzip"] = "gzip";
@ -52585,12 +52632,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support // This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType; var ArchiveToolType;
(function (ArchiveToolType) { (function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd"; ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -54988,7 +55035,7 @@ class HttpHeaders {
set(headerName, headerValue) { set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = { this._headersMap[getHeaderKey(headerName)] = {
name: headerName, name: headerName,
value: headerValue.toString(), value: headerValue.toString().trim(),
}; };
} }
/** /**
@ -55128,7 +55175,7 @@ const Constants = {
/** /**
* The core-http version * The core-http version
*/ */
coreHttpVersion: "3.0.0", coreHttpVersion: "3.0.4",
/** /**
* Specifies HTTP. * Specifies HTTP.
*/ */
@ -55206,13 +55253,6 @@ const XML_CHARKEY = "_";
// Copyright (c) Microsoft Corporation. // Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/** /**
* Encodes an URI. * Encodes an URI.
* *
@ -59893,7 +59933,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy()); factories.push(throttlingRetryPolicy());
} }
factories.push(deserializationPolicy(options.deserializationContentTypes)); factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) { if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings)); factories.push(proxyPolicy(options.proxySettings));
} }
factories.push(logPolicy({ logger: logger.info })); factories.push(logPolicy({ logger: logger.info }));
@ -59925,7 +59965,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) { if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
} }
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59938,7 +59978,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory); requestPolicyFactories.push(authPolicyFactory);
} }
requestPolicyFactories.push(logPolicy(loggingOptions)); requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) { if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy()); requestPolicyFactories.push(disableResponseDecompressionPolicy());
} }
return { return {
@ -60069,10 +60109,7 @@ function flattenResponse(_response, responseSpec) {
} }
function getCredentialScopes(options, baseUri) { function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes; return options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
} }
if (baseUri) { if (baseUri) {
return `${baseUri}/.default`; return `${baseUri}/.default`;
@ -60305,6 +60342,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true, enumerable: true,
get: function () { return coreUtil.delay; } get: function () { return coreUtil.delay; }
}); });
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', { Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true, enumerable: true,
get: function () { return coreAuth.isTokenCredential; } get: function () { return coreAuth.isTokenCredential; }
@ -60344,7 +60385,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings; exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue; exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration; exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid; exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy; exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy; exports.logPolicy = logPolicy;
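
The semver changes in this bundle (repeated in dist/restore/index.js below) replace unbounded `*`/`+` quantifiers with bounded repetition so that range parsing cannot be pushed into catastrophic backtracking (ReDoS). A short worked example of the `makeSafeRe` rewrite shown in the hunks above, assuming `MAX_LENGTH` is 256 as in the vendored module:

```js
var MAX_LENGTH = 256;                       // assumed: semver's version-length cap
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6;
var LETTERDASHNUMBER = '[a-zA-Z0-9-]';

// Same replacement table as in the diff: each greedy token gets a hard upper bound.
var safeRegexReplacements = [
  ['\\s', 1],
  ['\\d', MAX_LENGTH],
  [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
];

function makeSafeRe(value) {
  for (var i = 0; i < safeRegexReplacements.length; i++) {
    var token = safeRegexReplacements[i][0];
    var max = safeRegexReplacements[i][1];
    value = value
      .split(token + '*').join(token + '{0,' + max + '}')
      .split(token + '+').join(token + '{1,' + max + '}');
  }
  return value;
}

// An unbounded build-identifier pattern becomes a bounded one:
console.log(makeSafeRe('(?:\\+(' + LETTERDASHNUMBER + '+(?:\\.' + LETTERDASHNUMBER + '+)*))'));
// -> (?:\+([a-zA-Z0-9-]{1,250}(?:\.[a-zA-Z0-9-]{1,250})*))
```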

dist/restore/index.js vendored

@ -1136,10 +1136,9 @@ function resolvePaths(patterns) {
implicitDescendants: false implicitDescendants: false
}); });
try { try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value; _c = _g.value;
_e = false; _e = false;
try {
const file = _c; const file = _c;
const relativeFile = path const relativeFile = path
.relative(workspace, file) .relative(workspace, file)
@ -1154,10 +1153,6 @@ function resolvePaths(patterns) {
paths.push(`${relativeFile}`); paths.push(`${relativeFile}`);
} }
} }
finally {
_e = true;
}
}
} }
catch (e_1_1) { e_1 = { error: e_1_1 }; } catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally { finally {
@ -3389,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths; // don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore // Add compression method to cache version to restore
// compressed cache as per compression method // compressed cache as per compression method
if (compressionMethod) { if (compressionMethod) {
@ -7276,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_COMPONENT_LENGTH = 16
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
// The actual regexps go on exports.re // The actual regexps go on exports.re
var re = exports.re = [] var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = [] var src = exports.src = []
var t = exports.tokens = {} var t = exports.tokens = {}
var R = 0 var R = 0
@ -7286,6 +7285,31 @@ function tok (n) {
t[n] = R++ t[n] = R++
} }
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings. // validating, and parsing SemVer version strings.
@ -7295,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER') tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE') tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER') tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -7344,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER') tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -7424,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])' '(?:$|[^\\d])'
tok('COERCERTL') tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"
@ -7433,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM') tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~' var tildeTrimReplace = '$1~'
tok('TILDE') tok('TILDE')
@ -7448,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM') tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^' var caretTrimReplace = '$1^'
tok('CARET') tok('CARET')
@ -7469,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
// this one has to use the /g flag // this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3' var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4` // Something like `1.2.3 - 1.2.4`
@ -7497,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i]) debug(i, src[i])
if (!re[i]) { if (!re[i]) {
re[i] = new RegExp(src[i]) re[i] = new RegExp(src[i])
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
} }
} }
@ -7521,7 +7557,7 @@ function parse (version, options) {
return null return null
} }
var r = options.loose ? re[t.LOOSE] : re[t.FULL] var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) { if (!r.test(version)) {
return null return null
} }
@ -7576,7 +7612,7 @@ function SemVer (version, options) {
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
if (!m) { if (!m) {
throw new TypeError('Invalid Version: ' + version) throw new TypeError('Invalid Version: ' + version)
@ -8021,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options) return new Comparator(comp, options)
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -8037,7 +8074,7 @@ function Comparator (comp, options) {
var ANY = {} var ANY = {}
Comparator.prototype.parse = function (comp) { Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r) var m = comp.match(r)
if (!m) { if (!m) {
@ -8161,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) { .trim()
.split(/\s+/)
.join(' ')
// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim()) return this.parseRange(range.trim())
}, this).filter(function (c) { }, this).filter(function (c) {
// throw out any that are not relevant for whatever reason // throw out any that are not relevant for whatever reason
@ -8171,7 +8215,7 @@ function Range (range, options) {
}) })
if (!this.set.length) { if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range) throw new TypeError('Invalid SemVer Range: ' + this.raw)
} }
this.format() this.format()
@ -8190,20 +8234,19 @@ Range.prototype.toString = function () {
Range.prototype.parseRange = function (range) { Range.prototype.parseRange = function (range) {
var loose = this.options.loose var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace) range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM]) debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
// normalize spaces // normalize spaces
range = range.split(/\s+/).join(' ') range = range.split(/\s+/).join(' ')
@ -8211,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) { var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options) return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/) }, this).join(' ').split(/\s+/)
@ -8311,7 +8354,7 @@ function replaceTildes (comp, options) {
} }
function replaceTilde (comp, options) { function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr) debug('tilde', comp, _, M, m, p, pr)
var ret var ret
@ -8352,7 +8395,7 @@ function replaceCarets (comp, options) {
function replaceCaret (comp, options) { function replaceCaret (comp, options) {
debug('caret', comp, options) debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr) debug('caret', comp, _, M, m, p, pr)
var ret var ret
@ -8411,7 +8454,7 @@ function replaceXRanges (comp, options) {
function replaceXRange (comp, options) { function replaceXRange (comp, options) {
comp = comp.trim() comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) { return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr) debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M) var xM = isX(M)
@ -8486,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) { function replaceStars (comp, options) {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp.trim().replace(safeRe[t.STAR], '')
} }
// This function is passed to string.replace(re[t.HYPHENRANGE]) // This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8812,7 +8855,7 @@ function coerce (version, options) {
var match = null var match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(safeRe[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
@ -8823,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
var next var next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 safeRe[t.COERCERTL].lastIndex = -1
} }
if (match === null) { if (match === null) {
@ -50262,14 +50305,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) { this.saxParser.onopentag = (function(_this) {
return function(node) { return function(node) {
var key, newValue, obj, processedKey, ref; var key, newValue, obj, processedKey, ref;
obj = {}; obj = Object.create(null);
obj[charkey] = ""; obj[charkey] = "";
if (!_this.options.ignoreAttrs) { if (!_this.options.ignoreAttrs) {
ref = node.attributes; ref = node.attributes;
for (key in ref) { for (key in ref) {
if (!hasProp.call(ref, key)) continue; if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) { if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {}; obj[attrkey] = Object.create(null);
} }
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50319,8 +50362,12 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
} }
if (isEmpty(obj)) { if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr; obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
} }
}
if (_this.options.validator != null) { if (_this.options.validator != null) {
xpath = "/" + ((function() { xpath = "/" + ((function() {
var i, len, results; var i, len, results;
@ -50343,7 +50390,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) { if (!_this.options.preserveChildrenOrder) {
node = {}; node = Object.create(null);
if (_this.options.attrkey in obj) { if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey]; node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey]; delete obj[_this.options.attrkey];
@ -50358,7 +50405,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node; obj = node;
} else if (s) { } else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || []; s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {}; objClone = Object.create(null);
for (key in obj) { for (key in obj) {
if (!hasProp.call(obj, key)) continue; if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key]; objClone[key] = obj[key];
@ -50375,7 +50422,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else { } else {
if (_this.options.explicitRoot) { if (_this.options.explicitRoot) {
old = obj; old = obj;
obj = {}; obj = Object.create(null);
obj[nodeName] = old; obj[nodeName] = old;
} }
_this.resultObject = obj; _this.resultObject = obj;
@ -52577,7 +52624,7 @@ var CacheFilename;
(function (CacheFilename) { (function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst"; CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); })(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod; var CompressionMethod;
(function (CompressionMethod) { (function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip"; CompressionMethod["Gzip"] = "gzip";
@ -52585,12 +52632,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support // This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType; var ArchiveToolType;
(function (ArchiveToolType) { (function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd"; ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -54988,7 +55035,7 @@ class HttpHeaders {
set(headerName, headerValue) { set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = { this._headersMap[getHeaderKey(headerName)] = {
name: headerName, name: headerName,
value: headerValue.toString(), value: headerValue.toString().trim(),
}; };
} }
/** /**
@ -55128,7 +55175,7 @@ const Constants = {
/** /**
* The core-http version * The core-http version
*/ */
coreHttpVersion: "3.0.0", coreHttpVersion: "3.0.4",
/** /**
* Specifies HTTP. * Specifies HTTP.
*/ */
@ -55206,13 +55253,6 @@ const XML_CHARKEY = "_";
// Copyright (c) Microsoft Corporation. // Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/** /**
* Encodes an URI. * Encodes an URI.
* *
@ -59893,7 +59933,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy()); factories.push(throttlingRetryPolicy());
} }
factories.push(deserializationPolicy(options.deserializationContentTypes)); factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) { if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings)); factories.push(proxyPolicy(options.proxySettings));
} }
factories.push(logPolicy({ logger: logger.info })); factories.push(logPolicy({ logger: logger.info }));
@ -59925,7 +59965,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) { if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
} }
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59938,7 +59978,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory); requestPolicyFactories.push(authPolicyFactory);
} }
requestPolicyFactories.push(logPolicy(loggingOptions)); requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) { if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy()); requestPolicyFactories.push(disableResponseDecompressionPolicy());
} }
return { return {
@ -60069,10 +60109,7 @@ function flattenResponse(_response, responseSpec) {
} }
function getCredentialScopes(options, baseUri) { function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes; return options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
} }
if (baseUri) { if (baseUri) {
return `${baseUri}/.default`; return `${baseUri}/.default`;
@ -60305,6 +60342,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true, enumerable: true,
get: function () { return coreUtil.delay; } get: function () { return coreUtil.delay; }
}); });
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
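The block above replaces the locally computed isNode constant (removed a few hunks earlier) with a re-export of coreUtil.isNode; the getter means consumers read the value from @azure/core-util at access time rather than a copied snapshot. A small sketch of the same live re-export pattern; the coreUtil object below is a stand-in, not the real package:

// Stand-in for the @azure/core-util helper; the real package computes this
// the same general way (assumption for illustration only).
const coreUtil = {
    isNode: typeof process !== "undefined" &&
        Boolean(process.version) &&
        Boolean(process.versions && process.versions.node)
};

const reExports = {};
// Re-export through a getter, as in the diff above, so the value is always
// read from the helper module at the moment a consumer accesses it.
Object.defineProperty(reExports, "isNode", {
    enumerable: true,
    get: function () { return coreUtil.isNode; }
});

console.log(reExports.isNode); // true when run under Node.js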
Object.defineProperty(exports, 'isTokenCredential', { Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true, enumerable: true,
get: function () { return coreAuth.isTokenCredential; } get: function () { return coreAuth.isTokenCredential; }
@ -60344,7 +60385,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings; exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue; exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration; exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid; exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy; exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy; exports.logPolicy = logPolicy;



@ -1192,10 +1192,9 @@ function resolvePaths(patterns) {
implicitDescendants: false implicitDescendants: false
}); });
try { try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value; _c = _g.value;
_e = false; _e = false;
try {
const file = _c; const file = _c;
const relativeFile = path const relativeFile = path
.relative(workspace, file) .relative(workspace, file)
@ -1210,10 +1209,6 @@ function resolvePaths(patterns) {
paths.push(`${relativeFile}`); paths.push(`${relativeFile}`);
} }
} }
finally {
_e = true;
}
}
} }
catch (e_1_1) { e_1 = { error: e_1_1 }; } catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally { finally {
@ -3445,7 +3440,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths; // don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore // Add compression method to cache version to restore
// compressed cache as per compression method // compressed cache as per compression method
if (compressionMethod) { if (compressionMethod) {
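The paths.slice() copy above (flagged with the "don't pass changes upstream" comment) keeps getCacheVersion from appending the compression method and version salt onto the caller's own paths array. A simplified stand-in showing the difference; only the copying behaviour of the real function is reproduced here:

// Simplified stand-ins for getCacheVersion; only the copy semantics are shown.
function cacheVersionAliasing(paths, compressionMethod) {
    const components = paths;          // aliases the caller's array
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    return components.join("|");
}

function cacheVersionCopying(paths, compressionMethod) {
    const components = paths.slice();  // defensive copy, as in the diff above
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    return components.join("|");
}

const a = ["~/.npm"];
cacheVersionAliasing(a, "zstd");
console.log(a); // [ '~/.npm', 'zstd' ]  the caller's array was mutated

const b = ["~/.npm"];
cacheVersionCopying(b, "zstd");
console.log(b); // [ '~/.npm' ]          the caller's array is untouched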
@ -7332,8 +7328,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_COMPONENT_LENGTH = 16
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
// The actual regexps go on exports.re // The actual regexps go on exports.re
var re = exports.re = [] var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = [] var src = exports.src = []
var t = exports.tokens = {} var t = exports.tokens = {}
var R = 0 var R = 0
@ -7342,6 +7341,31 @@ function tok (n) {
t[n] = R++ t[n] = R++
} }
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
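The makeSafeRe helper above rewrites unbounded '*' and '+' quantifiers on the whitespace, digit, and letter-dash-number tokens into bounded {0,N} and {1,N} repetitions, which is semver's mitigation for catastrophic backtracking (ReDoS) on hostile version strings. A standalone sketch of the rewrite; the constants are copied from the hunk above, and MAX_LENGTH (256 in semver) is restated here as an assumption since its definition sits outside this hunk:

// Standalone copy of the makeSafeRe rewrite from the hunk above.
var MAX_LENGTH = 256; // assumed value; defined elsewhere in the vendored semver
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6;
var LETTERDASHNUMBER = '[a-zA-Z0-9-]';

var safeRegexReplacements = [
  ['\\s', 1],
  ['\\d', MAX_LENGTH],
  [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH]
];

function makeSafeRe (value) {
  for (var i = 0; i < safeRegexReplacements.length; i++) {
    var token = safeRegexReplacements[i][0];
    var max = safeRegexReplacements[i][1];
    value = value
      .split(token + '*').join(token + '{0,' + max + '}')
      .split(token + '+').join(token + '{1,' + max + '}');
  }
  return value;
}

// Unbounded repetition becomes bounded repetition, so a long run of spaces
// can no longer drive the tilde-trim regex into exponential backtracking.
console.log(makeSafeRe('(\\s*)(?:~>?)\\s+')); // (\s{0,1})(?:~>?)\s{1,1}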
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings. // validating, and parsing SemVer version strings.
@ -7351,14 +7375,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER') tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE') tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER') tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -7400,7 +7424,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER') tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -7480,6 +7504,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])' '(?:$|[^\\d])'
tok('COERCERTL') tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"
@ -7489,6 +7514,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM') tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~' var tildeTrimReplace = '$1~'
tok('TILDE') tok('TILDE')
@ -7504,6 +7530,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM') tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^' var caretTrimReplace = '$1^'
tok('CARET') tok('CARET')
@ -7525,6 +7552,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
// this one has to use the /g flag // this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3' var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4` // Something like `1.2.3 - 1.2.4`
@ -7553,6 +7581,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i]) debug(i, src[i])
if (!re[i]) { if (!re[i]) {
re[i] = new RegExp(src[i]) re[i] = new RegExp(src[i])
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
} }
} }
@ -7577,7 +7613,7 @@ function parse (version, options) {
return null return null
} }
var r = options.loose ? re[t.LOOSE] : re[t.FULL] var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) { if (!r.test(version)) {
return null return null
} }
@ -7632,7 +7668,7 @@ function SemVer (version, options) {
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
if (!m) { if (!m) {
throw new TypeError('Invalid Version: ' + version) throw new TypeError('Invalid Version: ' + version)
@ -8077,6 +8113,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options) return new Comparator(comp, options)
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -8093,7 +8130,7 @@ function Comparator (comp, options) {
var ANY = {} var ANY = {}
Comparator.prototype.parse = function (comp) { Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r) var m = comp.match(r)
if (!m) { if (!m) {
@ -8217,9 +8254,16 @@ function Range (range, options) {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) { .trim()
.split(/\s+/)
.join(' ')
// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim()) return this.parseRange(range.trim())
}, this).filter(function (c) { }, this).filter(function (c) {
// throw out any that are not relevant for whatever reason // throw out any that are not relevant for whatever reason
@ -8227,7 +8271,7 @@ function Range (range, options) {
}) })
if (!this.set.length) { if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range) throw new TypeError('Invalid SemVer Range: ' + this.raw)
} }
this.format() this.format()
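The Range constructor above now collapses all whitespace once up front (trim, split on /\s+/, join with a single space) and then splits on the literal '||' instead of the old /\s*\|\|\s*/ regex, so no unbounded \s* pattern ever runs over raw user input, and the normalized string doubles as the raw value used in error messages. A quick illustrative sketch of that normalization on a messy range string:

// Hypothetical illustration of the whitespace pre-normalization shown above.
function normalizeRange(range) {
  // Collapse every run of whitespace to a single space, exactly once.
  return range.trim().split(/\s+/).join(' ');
}

const raw = '  >=1.2.3   ||   2.x \t ';
const normalized = normalizeRange(raw);
console.log(JSON.stringify(normalized)); // ">=1.2.3 || 2.x"

// Splitting on the literal '||' is now cheap and safe; each side is still
// trimmed before comparator parsing, mirroring parseRange(range.trim()).
console.log(normalized.split('||').map(function (r) { return r.trim(); }));
// [ '>=1.2.3', '2.x' ]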
@ -8246,20 +8290,19 @@ Range.prototype.toString = function () {
Range.prototype.parseRange = function (range) { Range.prototype.parseRange = function (range) {
var loose = this.options.loose var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace) range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM]) debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
// normalize spaces // normalize spaces
range = range.split(/\s+/).join(' ') range = range.split(/\s+/).join(' ')
@ -8267,7 +8310,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) { var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options) return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/) }, this).join(' ').split(/\s+/)
@ -8367,7 +8410,7 @@ function replaceTildes (comp, options) {
} }
function replaceTilde (comp, options) { function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr) debug('tilde', comp, _, M, m, p, pr)
var ret var ret
@ -8408,7 +8451,7 @@ function replaceCarets (comp, options) {
function replaceCaret (comp, options) { function replaceCaret (comp, options) {
debug('caret', comp, options) debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr) debug('caret', comp, _, M, m, p, pr)
var ret var ret
@ -8467,7 +8510,7 @@ function replaceXRanges (comp, options) {
function replaceXRange (comp, options) { function replaceXRange (comp, options) {
comp = comp.trim() comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) { return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr) debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M) var xM = isX(M)
@ -8542,7 +8585,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) { function replaceStars (comp, options) {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp.trim().replace(safeRe[t.STAR], '')
} }
// This function is passed to string.replace(re[t.HYPHENRANGE]) // This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8868,7 +8911,7 @@ function coerce (version, options) {
var match = null var match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(safeRe[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
@ -8879,17 +8922,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
var next var next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 safeRe[t.COERCERTL].lastIndex = -1
} }
if (match === null) { if (match === null) {
@ -50269,14 +50312,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) { this.saxParser.onopentag = (function(_this) {
return function(node) { return function(node) {
var key, newValue, obj, processedKey, ref; var key, newValue, obj, processedKey, ref;
obj = {}; obj = Object.create(null);
obj[charkey] = ""; obj[charkey] = "";
if (!_this.options.ignoreAttrs) { if (!_this.options.ignoreAttrs) {
ref = node.attributes; ref = node.attributes;
for (key in ref) { for (key in ref) {
if (!hasProp.call(ref, key)) continue; if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) { if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {}; obj[attrkey] = Object.create(null);
} }
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50326,8 +50369,12 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
} }
if (isEmpty(obj)) { if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr; obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
} }
}
if (_this.options.validator != null) { if (_this.options.validator != null) {
xpath = "/" + ((function() { xpath = "/" + ((function() {
var i, len, results; var i, len, results;
@ -50350,7 +50397,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) { if (!_this.options.preserveChildrenOrder) {
node = {}; node = Object.create(null);
if (_this.options.attrkey in obj) { if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey]; node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey]; delete obj[_this.options.attrkey];
@ -50365,7 +50412,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node; obj = node;
} else if (s) { } else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || []; s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {}; objClone = Object.create(null);
for (key in obj) { for (key in obj) {
if (!hasProp.call(obj, key)) continue; if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key]; objClone[key] = obj[key];
@ -50382,7 +50429,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else { } else {
if (_this.options.explicitRoot) { if (_this.options.explicitRoot) {
old = obj; old = obj;
obj = {}; obj = Object.create(null);
obj[nodeName] = old; obj[nodeName] = old;
} }
_this.resultObject = obj; _this.resultObject = obj;
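The xml2js hunks above build every object derived from XML element or attribute names with Object.create(null) instead of a plain object literal, so a document whose tag names are things like __proto__ or constructor cannot reach Object.prototype (a common prototype-pollution vector); that motivation is an inference from the change, not stated in the diff. A minimal standalone illustration of the difference:

// Why prototype-less objects matter when keys come from untrusted XML.
const tagName = "__proto__"; // attacker-controlled element name

// A plain object literal: assigning to "__proto__" hits the prototype chain
// instead of creating an own property, so the data silently disappears.
const unsafe = {};
unsafe[tagName] = { polluted: true };
console.log(Object.keys(unsafe));    // []
console.log(JSON.stringify(unsafe)); // {}

// Object.create(null) has no prototype, so the key is stored as ordinary data.
const safe = Object.create(null);
safe[tagName] = { polluted: true };
console.log(Object.keys(safe));      // [ '__proto__' ]
console.log(JSON.stringify(safe));   // {"__proto__":{"polluted":true}}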
@ -52584,7 +52631,7 @@ var CacheFilename;
(function (CacheFilename) { (function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst"; CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); })(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod; var CompressionMethod;
(function (CompressionMethod) { (function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip"; CompressionMethod["Gzip"] = "gzip";
@ -52592,12 +52639,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support // This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType; var ArchiveToolType;
(function (ArchiveToolType) { (function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd"; ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -54995,7 +55042,7 @@ class HttpHeaders {
set(headerName, headerValue) { set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = { this._headersMap[getHeaderKey(headerName)] = {
name: headerName, name: headerName,
value: headerValue.toString(), value: headerValue.toString().trim(),
}; };
} }
/** /**
@ -55135,7 +55182,7 @@ const Constants = {
/** /**
* The core-http version * The core-http version
*/ */
coreHttpVersion: "3.0.0", coreHttpVersion: "3.0.4",
/** /**
* Specifies HTTP. * Specifies HTTP.
*/ */
@ -55213,13 +55260,6 @@ const XML_CHARKEY = "_";
// Copyright (c) Microsoft Corporation. // Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/** /**
* Encodes an URI. * Encodes an URI.
* *
@ -59900,7 +59940,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy()); factories.push(throttlingRetryPolicy());
} }
factories.push(deserializationPolicy(options.deserializationContentTypes)); factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) { if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings)); factories.push(proxyPolicy(options.proxySettings));
} }
factories.push(logPolicy({ logger: logger.info })); factories.push(logPolicy({ logger: logger.info }));
@ -59932,7 +59972,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) { if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
} }
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59945,7 +59985,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory); requestPolicyFactories.push(authPolicyFactory);
} }
requestPolicyFactories.push(logPolicy(loggingOptions)); requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) { if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy()); requestPolicyFactories.push(disableResponseDecompressionPolicy());
} }
return { return {
@ -60076,10 +60116,7 @@ function flattenResponse(_response, responseSpec) {
} }
function getCredentialScopes(options, baseUri) { function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes; return options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
} }
if (baseUri) { if (baseUri) {
return `${baseUri}/.default`; return `${baseUri}/.default`;
@ -60312,6 +60349,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true, enumerable: true,
get: function () { return coreUtil.delay; } get: function () { return coreUtil.delay; }
}); });
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', { Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true, enumerable: true,
get: function () { return coreAuth.isTokenCredential; } get: function () { return coreAuth.isTokenCredential; }
@ -60351,7 +60392,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings; exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue; exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration; exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid; exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy; exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy; exports.logPolicy = logPolicy;

dist/save/index.js vendored (152 changed lines)

@ -1136,10 +1136,9 @@ function resolvePaths(patterns) {
implicitDescendants: false implicitDescendants: false
}); });
try { try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value; _c = _g.value;
_e = false; _e = false;
try {
const file = _c; const file = _c;
const relativeFile = path const relativeFile = path
.relative(workspace, file) .relative(workspace, file)
@ -1154,10 +1153,6 @@ function resolvePaths(patterns) {
paths.push(`${relativeFile}`); paths.push(`${relativeFile}`);
} }
} }
finally {
_e = true;
}
}
} }
catch (e_1_1) { e_1 = { error: e_1_1 }; } catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally { finally {
@ -3389,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths; // don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore // Add compression method to cache version to restore
// compressed cache as per compression method // compressed cache as per compression method
if (compressionMethod) { if (compressionMethod) {
@ -7276,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_COMPONENT_LENGTH = 16
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
// The actual regexps go on exports.re // The actual regexps go on exports.re
var re = exports.re = [] var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = [] var src = exports.src = []
var t = exports.tokens = {} var t = exports.tokens = {}
var R = 0 var R = 0
@ -7286,6 +7285,31 @@ function tok (n) {
t[n] = R++ t[n] = R++
} }
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings. // validating, and parsing SemVer version strings.
@ -7295,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER') tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE') tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER') tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -7344,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER') tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -7424,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])' '(?:$|[^\\d])'
tok('COERCERTL') tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"
@ -7433,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM') tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~' var tildeTrimReplace = '$1~'
tok('TILDE') tok('TILDE')
@ -7448,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM') tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^' var caretTrimReplace = '$1^'
tok('CARET') tok('CARET')
@ -7469,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
// this one has to use the /g flag // this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3' var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4` // Something like `1.2.3 - 1.2.4`
@ -7497,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i]) debug(i, src[i])
if (!re[i]) { if (!re[i]) {
re[i] = new RegExp(src[i]) re[i] = new RegExp(src[i])
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
} }
} }
@ -7521,7 +7557,7 @@ function parse (version, options) {
return null return null
} }
var r = options.loose ? re[t.LOOSE] : re[t.FULL] var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) { if (!r.test(version)) {
return null return null
} }
@ -7576,7 +7612,7 @@ function SemVer (version, options) {
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
if (!m) { if (!m) {
throw new TypeError('Invalid Version: ' + version) throw new TypeError('Invalid Version: ' + version)
@ -8021,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options) return new Comparator(comp, options)
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -8037,7 +8074,7 @@ function Comparator (comp, options) {
var ANY = {} var ANY = {}
Comparator.prototype.parse = function (comp) { Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r) var m = comp.match(r)
if (!m) { if (!m) {
@ -8161,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) { .trim()
.split(/\s+/)
.join(' ')
// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim()) return this.parseRange(range.trim())
}, this).filter(function (c) { }, this).filter(function (c) {
// throw out any that are not relevant for whatever reason // throw out any that are not relevant for whatever reason
@ -8171,7 +8215,7 @@ function Range (range, options) {
}) })
if (!this.set.length) { if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range) throw new TypeError('Invalid SemVer Range: ' + this.raw)
} }
this.format() this.format()
@ -8190,20 +8234,19 @@ Range.prototype.toString = function () {
Range.prototype.parseRange = function (range) { Range.prototype.parseRange = function (range) {
var loose = this.options.loose var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace) range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM]) debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
// normalize spaces // normalize spaces
range = range.split(/\s+/).join(' ') range = range.split(/\s+/).join(' ')
@ -8211,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) { var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options) return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/) }, this).join(' ').split(/\s+/)
@ -8311,7 +8354,7 @@ function replaceTildes (comp, options) {
} }
function replaceTilde (comp, options) { function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr) debug('tilde', comp, _, M, m, p, pr)
var ret var ret
@ -8352,7 +8395,7 @@ function replaceCarets (comp, options) {
function replaceCaret (comp, options) { function replaceCaret (comp, options) {
debug('caret', comp, options) debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr) debug('caret', comp, _, M, m, p, pr)
var ret var ret
@ -8411,7 +8454,7 @@ function replaceXRanges (comp, options) {
function replaceXRange (comp, options) { function replaceXRange (comp, options) {
comp = comp.trim() comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) { return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr) debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M) var xM = isX(M)
@ -8486,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) { function replaceStars (comp, options) {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp.trim().replace(safeRe[t.STAR], '')
} }
// This function is passed to string.replace(re[t.HYPHENRANGE]) // This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8812,7 +8855,7 @@ function coerce (version, options) {
var match = null var match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(safeRe[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
@ -8823,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
var next var next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 safeRe[t.COERCERTL].lastIndex = -1
} }
if (match === null) { if (match === null) {
@ -50242,14 +50285,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) { this.saxParser.onopentag = (function(_this) {
return function(node) { return function(node) {
var key, newValue, obj, processedKey, ref; var key, newValue, obj, processedKey, ref;
obj = {}; obj = Object.create(null);
obj[charkey] = ""; obj[charkey] = "";
if (!_this.options.ignoreAttrs) { if (!_this.options.ignoreAttrs) {
ref = node.attributes; ref = node.attributes;
for (key in ref) { for (key in ref) {
if (!hasProp.call(ref, key)) continue; if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) { if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {}; obj[attrkey] = Object.create(null);
} }
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50299,8 +50342,12 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
} }
if (isEmpty(obj)) { if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr; obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
} }
}
if (_this.options.validator != null) { if (_this.options.validator != null) {
xpath = "/" + ((function() { xpath = "/" + ((function() {
var i, len, results; var i, len, results;
@ -50323,7 +50370,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} }
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) { if (!_this.options.preserveChildrenOrder) {
node = {}; node = Object.create(null);
if (_this.options.attrkey in obj) { if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey]; node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey]; delete obj[_this.options.attrkey];
@ -50338,7 +50385,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node; obj = node;
} else if (s) { } else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || []; s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {}; objClone = Object.create(null);
for (key in obj) { for (key in obj) {
if (!hasProp.call(obj, key)) continue; if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key]; objClone[key] = obj[key];
@ -50355,7 +50402,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else { } else {
if (_this.options.explicitRoot) { if (_this.options.explicitRoot) {
old = obj; old = obj;
obj = {}; obj = Object.create(null);
obj[nodeName] = old; obj[nodeName] = old;
} }
_this.resultObject = obj; _this.resultObject = obj;
@ -52557,7 +52604,7 @@ var CacheFilename;
(function (CacheFilename) { (function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst"; CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); })(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod; var CompressionMethod;
(function (CompressionMethod) { (function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip"; CompressionMethod["Gzip"] = "gzip";
@ -52565,12 +52612,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support // This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType; var ArchiveToolType;
(function (ArchiveToolType) { (function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd"; ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -54968,7 +55015,7 @@ class HttpHeaders {
set(headerName, headerValue) { set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = { this._headersMap[getHeaderKey(headerName)] = {
name: headerName, name: headerName,
value: headerValue.toString(), value: headerValue.toString().trim(),
}; };
} }
/** /**
@ -55108,7 +55155,7 @@ const Constants = {
/** /**
* The core-http version * The core-http version
*/ */
coreHttpVersion: "3.0.0", coreHttpVersion: "3.0.4",
/** /**
* Specifies HTTP. * Specifies HTTP.
*/ */
@ -55186,13 +55233,6 @@ const XML_CHARKEY = "_";
// Copyright (c) Microsoft Corporation. // Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/** /**
* Encodes an URI. * Encodes an URI.
* *
@ -59873,7 +59913,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy()); factories.push(throttlingRetryPolicy());
} }
factories.push(deserializationPolicy(options.deserializationContentTypes)); factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) { if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings)); factories.push(proxyPolicy(options.proxySettings));
} }
factories.push(logPolicy({ logger: logger.info })); factories.push(logPolicy({ logger: logger.info }));
@ -59905,7 +59945,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) { if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
} }
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59918,7 +59958,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory); requestPolicyFactories.push(authPolicyFactory);
} }
requestPolicyFactories.push(logPolicy(loggingOptions)); requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) { if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy()); requestPolicyFactories.push(disableResponseDecompressionPolicy());
} }
return { return {
@ -60049,10 +60089,7 @@ function flattenResponse(_response, responseSpec) {
} }
function getCredentialScopes(options, baseUri) { function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes; return options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
} }
if (baseUri) { if (baseUri) {
return `${baseUri}/.default`; return `${baseUri}/.default`;
@ -60285,6 +60322,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true, enumerable: true,
get: function () { return coreUtil.delay; } get: function () { return coreUtil.delay; }
}); });
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', { Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true, enumerable: true,
get: function () { return coreAuth.isTokenCredential; } get: function () { return coreAuth.isTokenCredential; }
@ -60324,7 +60365,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings; exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue; exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration; exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid; exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy; exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy; exports.logPolicy = logPolicy;

package-lock.json generated (1150 changed lines); file diff suppressed because it is too large.

package.json

@ -1,6 +1,6 @@
{ {
"name": "cache", "name": "cache",
"version": "3.3.2", "version": "3.3.3",
"private": true, "private": true,
"description": "Cache dependencies and build outputs", "description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js", "main": "dist/restore/index.js",
@ -23,7 +23,7 @@
"author": "GitHub", "author": "GitHub",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/cache": "^3.2.2", "@actions/cache": "^3.2.3",
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2" "@actions/io": "^1.1.2"