Add GITHUB_TOKEN Authentication

Author: Gregory Mitchell
Date: 2024-07-08 20:44:46 -05:00
Parent: 3653181378
Commit: b4d76a354b
Signature: no known key found in database (GPG key ID: 771A6C995A086B84)

3 changed files with 93 additions and 515 deletions
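
The substance of the change is in the JetBrains runtime lookup: each paginated call to the GitHub Releases API now sends a User-Agent plus an Authorization header built from the GITHUB_TOKEN environment variable, so version discovery runs as an authenticated client rather than against the anonymous rate limit. Below is a minimal sketch of the resulting request pattern, assuming `@actions/http-client` (presumably the client behind `this.http` in the diff); `listJbrReleases` and `ReleaseSummary` are illustrative names, not part of the commit.

```ts
import {HttpClient} from '@actions/http-client';
import {OutgoingHttpHeaders} from 'http';

// Only the field this sketch reads from each release object.
interface ReleaseSummary {
  tag_name: string;
}

async function listJbrReleases(): Promise<string[]> {
  const http = new HttpClient('jetbrains-jbr-installer');
  const requestHeaders: OutgoingHttpHeaders = {
    'User-Agent': 'jetbrains-jbr-installer',
    // Same Token scheme as the diff; GITHUB_TOKEN must be exported to the step's env.
    Authorization: `Token ${process.env.GITHUB_TOKEN}`
  };
  const tags: string[] = [];
  let page = 1;
  for (;;) {
    const url = `https://api.github.com/repos/JetBrains/JetBrainsRuntime/releases?per_page=100&page=${page}`;
    const releases = (await http.getJson<ReleaseSummary[]>(url, requestHeaders)).result;
    if (!releases || releases.length === 0) {
      break; // end of pagination
    }
    tags.push(...releases.map(r => r.tag_name));
    page++;
  }
  return tags;
}
```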

dist/cleanup/index.js (vendored, 297 changed lines)

@@ -66943,132 +66943,6 @@ function onConnectTimeout (socket) {
module.exports = buildConnector
/***/ }),
/***/ 4462:
/***/ ((module) => {
"use strict";
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
/***/ }),
/***/ 8045:
@@ -67901,7 +67775,6 @@ const { InvalidArgumentError } = __nccwpck_require__(8045)
 const { Blob } = __nccwpck_require__(4300)
 const nodeUtil = __nccwpck_require__(3837)
 const { stringify } = __nccwpck_require__(3477)
-const { headerNameLowerCasedRecord } = __nccwpck_require__(4462)
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@@ -68111,15 +67984,6 @@ function parseKeepAliveTimeout (val) {
 return m ? parseInt(m[1], 10) * 1000 : null
 }
-/**
- * Retrieves a header name and returns its lowercase value.
- * @param {string | Buffer} value Header name
- * @returns {string}
- */
-function headerNameToString (value) {
-return headerNameLowerCasedRecord[value] || value.toLowerCase()
-}
 function parseHeaders (headers, obj = {}) {
 // For H2 support
 if (!Array.isArray(headers)) return headers
@@ -68391,7 +68255,6 @@ module.exports = {
 isIterable,
 isAsyncIterable,
 isDestroyed,
-headerNameToString,
 parseRawHeaders,
 parseHeaders,
 parseKeepAliveTimeout,
@@ -75039,18 +74902,14 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983
 const assert = __nccwpck_require__(9491)
 const { isUint8Array } = __nccwpck_require__(9830)
-let supportedHashes = []
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')|undefined} */
 let crypto
 try {
 crypto = __nccwpck_require__(6113)
-const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
-supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
-/* c8 ignore next 3 */
 } catch {
 }
 function responseURL (response) {
@@ -75578,56 +75437,66 @@ function bytesMatch (bytes, metadataList) {
 return true
 }
-// 3. If response is not eligible for integrity validation, return false.
-// TODO
-// 4. If parsedMetadata is the empty set, return true.
+// 3. If parsedMetadata is the empty set, return true.
 if (parsedMetadata.length === 0) {
 return true
 }
-// 5. Let metadata be the result of getting the strongest
-// metadata from parsedMetadata.
-const strongest = getStrongestMetadata(parsedMetadata)
-const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
+// 4. Let metadata be the result of getting the strongest
+// metadata from parsedMetadata.
+const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+// get the strongest algorithm
+const strongest = list[0].algo
+// get all entries that use the strongest algorithm; ignore weaker
+const metadata = list.filter((item) => item.algo === strongest)
-// 6. For each item in metadata:
+// 5. For each item in metadata:
 for (const item of metadata) {
 // 1. Let algorithm be the alg component of item.
 const algorithm = item.algo
 // 2. Let expectedValue be the val component of item.
-const expectedValue = item.hash
+let expectedValue = item.hash
 // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
 // "be liberal with padding". This is annoying, and it's not even in the spec.
+if (expectedValue.endsWith('==')) {
+expectedValue = expectedValue.slice(0, -2)
+}
 // 3. Let actualValue be the result of applying algorithm to bytes.
 let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
-if (actualValue[actualValue.length - 1] === '=') {
-if (actualValue[actualValue.length - 2] === '=') {
-actualValue = actualValue.slice(0, -2)
-} else {
-actualValue = actualValue.slice(0, -1)
-}
+if (actualValue.endsWith('==')) {
+actualValue = actualValue.slice(0, -2)
 }
 // 4. If actualValue is a case-sensitive match for expectedValue,
 // return true.
-if (compareBase64Mixed(actualValue, expectedValue)) {
+if (actualValue === expectedValue) {
+return true
+}
+let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+if (actualBase64URL.endsWith('==')) {
+actualBase64URL = actualBase64URL.slice(0, -2)
+}
+if (actualBase64URL === expectedValue) {
 return true
 }
 }
-// 7. Return false.
+// 6. Return false.
 return false
 }
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -75641,6 +75510,8 @@ function parseMetadata (metadata) {
 // 2. Let empty be equal to true.
 let empty = true
+const supportedHashes = crypto.getHashes()
 // 3. For each token returned by splitting metadata on spaces:
 for (const token of metadata.split(' ')) {
 // 1. Set empty to false.
@@ -75650,11 +75521,7 @@ function parseMetadata (metadata) {
 const parsedToken = parseHashWithOptions.exec(token)
 // 3. If token does not parse, continue to the next token.
-if (
-parsedToken === null ||
-parsedToken.groups === undefined ||
-parsedToken.groups.algo === undefined
-) {
+if (parsedToken === null || parsedToken.groups === undefined) {
 // Note: Chromium blocks the request at this point, but Firefox
 // gives a warning that an invalid integrity was given. The
 // correct behavior is to ignore these, and subsequently not
@@ -75663,11 +75530,11 @@ function parseMetadata (metadata) {
 }
 // 4. Let algorithm be the hash-algo component of token.
-const algorithm = parsedToken.groups.algo.toLowerCase()
+const algorithm = parsedToken.groups.algo
 // 5. If algorithm is a hash function recognized by the user
 // agent, add the parsed token to result.
-if (supportedHashes.includes(algorithm)) {
+if (supportedHashes.includes(algorithm.toLowerCase())) {
 result.push(parsedToken.groups)
 }
 }
@@ -75680,82 +75547,6 @@ function parseMetadata (metadata) {
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@@ -76171,8 +75962,7 @@ module.exports = {
 urlHasHttpsScheme,
 urlIsHttpHttpsScheme,
 readAllBytes,
-normalizeMethodRecord,
-parseMetadata
+normalizeMethodRecord
 }
@@ -78259,17 +78049,12 @@ function parseLocation (statusCode, headers) {
 // https://tools.ietf.org/html/rfc7231#section-6.4.4
 function shouldRemoveHeader (header, removeContent, unknownOrigin) {
-if (header.length === 4) {
-return util.headerNameToString(header) === 'host'
-}
-if (removeContent && util.headerNameToString(header).startsWith('content-')) {
-return true
-}
-if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
-const name = util.headerNameToString(header)
-return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
-}
-return false
+return (
+(header.length === 4 && header.toString().toLowerCase() === 'host') ||
+(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
+(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
+(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
+)
 }
 // https://tools.ietf.org/html/rfc7231#section-6.4

dist/setup/index.js (vendored, 303 changed lines)

@@ -91797,132 +91797,6 @@ function onConnectTimeout (socket) {
module.exports = buildConnector
/***/ }),
/***/ 14462:
/***/ ((module) => {
"use strict";
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
/***/ }),
/***/ 48045:
@@ -92755,7 +92629,6 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
 const { Blob } = __nccwpck_require__(14300)
 const nodeUtil = __nccwpck_require__(73837)
 const { stringify } = __nccwpck_require__(63477)
-const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@@ -92965,15 +92838,6 @@ function parseKeepAliveTimeout (val) {
 return m ? parseInt(m[1], 10) * 1000 : null
 }
-/**
- * Retrieves a header name and returns its lowercase value.
- * @param {string | Buffer} value Header name
- * @returns {string}
- */
-function headerNameToString (value) {
-return headerNameLowerCasedRecord[value] || value.toLowerCase()
-}
 function parseHeaders (headers, obj = {}) {
 // For H2 support
 if (!Array.isArray(headers)) return headers
@@ -93245,7 +93109,6 @@ module.exports = {
 isIterable,
 isAsyncIterable,
 isDestroyed,
-headerNameToString,
 parseRawHeaders,
 parseHeaders,
 parseKeepAliveTimeout,
@@ -99893,18 +99756,14 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
 const assert = __nccwpck_require__(39491)
 const { isUint8Array } = __nccwpck_require__(29830)
-let supportedHashes = []
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')|undefined} */
 let crypto
 try {
 crypto = __nccwpck_require__(6113)
-const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
-supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
-/* c8 ignore next 3 */
 } catch {
 }
 function responseURL (response) {
@@ -100432,56 +100291,66 @@ function bytesMatch (bytes, metadataList) {
 return true
 }
-// 3. If response is not eligible for integrity validation, return false.
-// TODO
-// 4. If parsedMetadata is the empty set, return true.
+// 3. If parsedMetadata is the empty set, return true.
 if (parsedMetadata.length === 0) {
 return true
 }
-// 5. Let metadata be the result of getting the strongest
-// metadata from parsedMetadata.
-const strongest = getStrongestMetadata(parsedMetadata)
-const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
+// 4. Let metadata be the result of getting the strongest
+// metadata from parsedMetadata.
+const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+// get the strongest algorithm
+const strongest = list[0].algo
+// get all entries that use the strongest algorithm; ignore weaker
+const metadata = list.filter((item) => item.algo === strongest)
-// 6. For each item in metadata:
+// 5. For each item in metadata:
 for (const item of metadata) {
 // 1. Let algorithm be the alg component of item.
 const algorithm = item.algo
 // 2. Let expectedValue be the val component of item.
-const expectedValue = item.hash
+let expectedValue = item.hash
 // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
 // "be liberal with padding". This is annoying, and it's not even in the spec.
+if (expectedValue.endsWith('==')) {
+expectedValue = expectedValue.slice(0, -2)
+}
 // 3. Let actualValue be the result of applying algorithm to bytes.
 let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
-if (actualValue[actualValue.length - 1] === '=') {
-if (actualValue[actualValue.length - 2] === '=') {
-actualValue = actualValue.slice(0, -2)
-} else {
-actualValue = actualValue.slice(0, -1)
-}
+if (actualValue.endsWith('==')) {
+actualValue = actualValue.slice(0, -2)
 }
 // 4. If actualValue is a case-sensitive match for expectedValue,
 // return true.
-if (compareBase64Mixed(actualValue, expectedValue)) {
+if (actualValue === expectedValue) {
+return true
+}
+let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+if (actualBase64URL.endsWith('==')) {
+actualBase64URL = actualBase64URL.slice(0, -2)
+}
+if (actualBase64URL === expectedValue) {
 return true
 }
 }
-// 7. Return false.
+// 6. Return false.
 return false
 }
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -100495,6 +100364,8 @@ function parseMetadata (metadata) {
 // 2. Let empty be equal to true.
 let empty = true
+const supportedHashes = crypto.getHashes()
 // 3. For each token returned by splitting metadata on spaces:
 for (const token of metadata.split(' ')) {
 // 1. Set empty to false.
@@ -100504,11 +100375,7 @@ function parseMetadata (metadata) {
 const parsedToken = parseHashWithOptions.exec(token)
 // 3. If token does not parse, continue to the next token.
-if (
-parsedToken === null ||
-parsedToken.groups === undefined ||
-parsedToken.groups.algo === undefined
-) {
+if (parsedToken === null || parsedToken.groups === undefined) {
 // Note: Chromium blocks the request at this point, but Firefox
 // gives a warning that an invalid integrity was given. The
 // correct behavior is to ignore these, and subsequently not
@@ -100517,11 +100384,11 @@ function parseMetadata (metadata) {
 }
 // 4. Let algorithm be the hash-algo component of token.
-const algorithm = parsedToken.groups.algo.toLowerCase()
+const algorithm = parsedToken.groups.algo
 // 5. If algorithm is a hash function recognized by the user
 // agent, add the parsed token to result.
-if (supportedHashes.includes(algorithm)) {
+if (supportedHashes.includes(algorithm.toLowerCase())) {
 result.push(parsedToken.groups)
 }
 }
@@ -100534,82 +100401,6 @@ function parseMetadata (metadata) {
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@@ -101025,8 +100816,7 @@ module.exports = {
 urlHasHttpsScheme,
 urlIsHttpHttpsScheme,
 readAllBytes,
-normalizeMethodRecord,
-parseMetadata
+normalizeMethodRecord
 }
@@ -103113,17 +102903,12 @@ function parseLocation (statusCode, headers) {
 // https://tools.ietf.org/html/rfc7231#section-6.4.4
 function shouldRemoveHeader (header, removeContent, unknownOrigin) {
-if (header.length === 4) {
-return util.headerNameToString(header) === 'host'
-}
-if (removeContent && util.headerNameToString(header).startsWith('content-')) {
-return true
-}
-if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
-const name = util.headerNameToString(header)
-return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
-}
-return false
+return (
+(header.length === 4 && header.toString().toLowerCase() === 'host') ||
+(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
+(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
+(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
+)
 }
 // https://tools.ietf.org/html/rfc7231#section-6.4
@@ -124416,12 +124201,16 @@ class JetBrainsDistribution extends base_installer_1.JavaBase {
 const rawVersions = [];
 while (true) {
 const requestArguments = `per_page=100&page=${page_index}`;
+const requestHeaders = {
+"User-Agent": "jetbrains-jbr-installer",
+"Authorization": `Token ${process.env.GITHUB_TOKEN}`,
+};
 const rawUrl = `https://api.github.com/repos/JetBrains/JetBrainsRuntime/releases?${requestArguments}`;
 if (core.isDebug() && page_index === 1) {
 // url is identical except page_index so print it once for debug
 core.debug(`Gathering available versions from '${rawUrl}'`);
 }
-const paginationPage = (yield this.http.getJson(rawUrl)).result;
+const paginationPage = (yield this.http.getJson(rawUrl, requestHeaders)).result;
 if (!paginationPage || paginationPage.length === 0) {
 // break infinity loop because we have reached end of pagination
 break;

JetBrains distribution source (TypeScript; file path not shown)

@@ -14,9 +14,9 @@ import {
 } from '../base-models';
 import {
 extractJdkFile,
-getDownloadArchiveExtension,
 isVersionSatisfies
 } from '../../util';
+import {IncomingHttpHeaders, OutgoingHttpHeaders} from "http";
 export class JetBrainsDistribution extends JavaBase {
 constructor(installerOptions: JavaInstallerOptions) {
@@ -98,6 +98,10 @@ export class JetBrainsDistribution extends JavaBase {
 const rawVersions: IJetBrainsRawVersion[] = [];
 while (true) {
 const requestArguments = `per_page=100&page=${page_index}`;
+const requestHeaders: OutgoingHttpHeaders = {
+"User-Agent": "jetbrains-jbr-installer",
+"Authorization": `Token ${process.env.GITHUB_TOKEN}`,
+}
 const rawUrl = `https://api.github.com/repos/JetBrains/JetBrainsRuntime/releases?${requestArguments}`;
 if (core.isDebug() && page_index === 1) {
@@ -109,7 +113,7 @@ export class JetBrainsDistribution extends JavaBase {
 const paginationPage = (
 await this.http.getJson<IJetBrainsRawVersion[]>(
-rawUrl
+rawUrl, requestHeaders
 )
 ).result;
 if (!paginationPage || paginationPage.length === 0) {
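
One operational note on the new header: GITHUB_TOKEN is not exported to a step's environment automatically, so `process.env.GITHUB_TOKEN` is only set when the workflow passes it in (for example through the step's `env` block). When it is absent, the template literal in the diff produces the literal value `Token undefined`. The sketch below is a hypothetical defensive variant, not part of this commit, that attaches Authorization only when a token is actually available.

```ts
import {OutgoingHttpHeaders} from 'http';

// Hypothetical helper (not in the commit): omit Authorization when no token
// was provided, so unauthenticated runs fall back to anonymous, rate-limited
// access instead of sending "Token undefined".
export function buildReleaseHeaders(
  token: string | undefined = process.env.GITHUB_TOKEN
): OutgoingHttpHeaders {
  const headers: OutgoingHttpHeaders = {'User-Agent': 'jetbrains-jbr-installer'};
  if (token) {
    headers['Authorization'] = `Token ${token}`;
  }
  return headers;
}
```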