diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index a523c319..e24faa9c 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -5,11 +5,15 @@ on:
     branches:
       - master
 
+env:
+  ACTIONS_STEP_DEBUG: true
+
 jobs:
   public-cache:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
+        useDaemon: [true, false]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -23,12 +27,14 @@ jobs:
         with:
           name: cachix-action
           signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+          useDaemon: ${{ matrix.useDaemon }}
       - run: nix-build test.nix
 
   public-cache-no-signing-key:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
+        useDaemon: [true, false]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -39,6 +45,7 @@
         uses: ./
         with:
           name: cachix-action
+          useDaemon: ${{ matrix.useDaemon }}
       - run: nix-build test.nix
 
   private-cache:
@@ -46,6 +53,7 @@
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
+        useDaemon: [true, false]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -58,6 +66,7 @@
           name: cachix-action-private
           signingKey: '${{ secrets.CACHIX_SIGNING_KEY_PRIVATE }}'
           authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
+          useDaemon: ${{ matrix.useDaemon }}
       - run: nix-build test.nix
 
   installCommand:
@@ -87,7 +96,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: Install Nix
         uses: cachix/install-nix-action@v24
-        with: 
+        with:
           nix_path: nixpkgs=channel:nixos-22.11
           install_url: https://nixos-nix-install-tests.cachix.org/serve/kkq45x7yrzvxq8627fi6hkswnfa7mg2l/install
           install_options: '--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve'
diff --git a/action.yml b/action.yml
index 9fd5c3b7..abc2cc9b 100644
--- a/action.yml
+++ b/action.yml
@@ -19,10 +19,15 @@ inputs:
     description: 'Ignored if pathsToPush is set. Regular expression to exclude derivations for the cache push, for example "(-source$|nixpkgs\.tar\.gz$)". Warning: this filter does not guarantee it will not get pushed in case the path is part of the closure of something that will get pushed.'
   cachixArgs:
     description: 'Extra command-line arguments to pass to cachix. If empty, defaults to -j8'
-  installCommand:
-    description: 'Override the default cachix installation method'
   skipAddingSubstituter:
     description: 'Set to true to skip adding cachix cache as a substitute'
+  useDaemon:
+    description: "Push store paths to the cache as they're built with the Cachix Daemon"
+    default: true
+  cachixBin:
+    description: 'Provide a custom path to the cachix binary'
+  installCommand:
+    description: 'Override the default cachix installation method'
 branding:
   color: 'blue'
   icon: 'database'
diff --git a/dist/main/index.js b/dist/main/index.js
index ea0ed9aa..989b7c95 100644
--- a/dist/main/index.js
+++ b/dist/main/index.js
@@ -3027,6 +3027,3176 @@ function copyFile(srcFile, destFile, force) {
 }
 //# sourceMappingURL=io.js.map
 
+/***/ }),
+
+/***/ 7129:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+// A linked list to keep track of recently-used-ness
+const Yallist = __nccwpck_require__(665)
+
+const MAX = Symbol('max')
+const LENGTH = Symbol('length')
+const LENGTH_CALCULATOR = Symbol('lengthCalculator')
+const ALLOW_STALE = Symbol('allowStale')
+const MAX_AGE = Symbol('maxAge')
+const DISPOSE = Symbol('dispose')
+const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
+const LRU_LIST = Symbol('lruList')
+const CACHE = Symbol('cache')
+const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
+
+const naiveLength = () => 1
+
+// lruList is a yallist where the head is the youngest
+// item, and the tail is the oldest. the list contains the Hit
+// objects as the entries.
+// Each Hit object has a reference to its Yallist.Node. This
+// never changes.
+//
+// cache is a Map (or PseudoMap) that matches the keys to
+// the Yallist.Node object.
+class LRUCache {
+  constructor (options) {
+    if (typeof options === 'number')
+      options = { max: options }
+
+    if (!options)
+      options = {}
+
+    if (options.max && (typeof options.max !== 'number' || options.max < 0))
+      throw new TypeError('max must be a non-negative number')
+    // Kind of weird to have a default max of Infinity, but oh well.
+    const max = this[MAX] = options.max || Infinity
+
+    const lc = options.length || naiveLength
+    this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
+    this[ALLOW_STALE] = options.stale || false
+    if (options.maxAge && typeof options.maxAge !== 'number')
+      throw new TypeError('maxAge must be a number')
+    this[MAX_AGE] = options.maxAge || 0
+    this[DISPOSE] = options.dispose
+    this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
+    this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
+    this.reset()
+  }
+
+  // resize the cache when the max changes.
+  set max (mL) {
+    if (typeof mL !== 'number' || mL < 0)
+      throw new TypeError('max must be a non-negative number')
+
+    this[MAX] = mL || Infinity
+    trim(this)
+  }
+  get max () {
+    return this[MAX]
+  }
+
+  set allowStale (allowStale) {
+    this[ALLOW_STALE] = !!allowStale
+  }
+  get allowStale () {
+    return this[ALLOW_STALE]
+  }
+
+  set maxAge (mA) {
+    if (typeof mA !== 'number')
+      throw new TypeError('maxAge must be a non-negative number')
+
+    this[MAX_AGE] = mA
+    trim(this)
+  }
+  get maxAge () {
+    return this[MAX_AGE]
+  }
+
+  // resize the cache when the lengthCalculator changes.
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. 
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache + + +/***/ }), + +/***/ 1532: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = __nccwpck_require__(785) +const { safeRe: re, t } = __nccwpck_require__(9523) +const cmp = __nccwpck_require__(5098) +const debug = __nccwpck_require__(427) +const SemVer = __nccwpck_require__(8088) +const Range = __nccwpck_require__(9828) + + +/***/ }), + +/***/ 9828: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = __nccwpck_require__(7129) +const cache = new LRU({ max: 1000 }) + +const parseOptions = __nccwpck_require__(785) +const Comparator = __nccwpck_require__(1532) +const debug = __nccwpck_require__(427) +const SemVer = __nccwpck_require__(8088) +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = __nccwpck_require__(9523) +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __nccwpck_require__(2293) + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + + +/***/ }), + +/***/ 8088: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const debug = __nccwpck_require__(427) +const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(2293) +const { safeRe: re, t } = __nccwpck_require__(9523) + +const parseOptions = __nccwpck_require__(785) +const { compareIdentifiers } = __nccwpck_require__(2463) +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer + + +/***/ }), + +/***/ 8848: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(5925) +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean + + +/***/ }), + +/***/ 5098: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const eq = __nccwpck_require__(1898) +const neq = __nccwpck_require__(6017) +const gt = __nccwpck_require__(4123) +const gte = __nccwpck_require__(5522) +const lt = __nccwpck_require__(194) +const lte = __nccwpck_require__(7520) + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp + + +/***/ }), + +/***/ 3466: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const parse = __nccwpck_require__(5925) +const { safeRe: re, t } = __nccwpck_require__(9523) + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. 
+ let next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) +} +module.exports = coerce + + +/***/ }), + +/***/ 2156: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild + + +/***/ }), + +/***/ 2804: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose + + +/***/ }), + +/***/ 4309: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare + + +/***/ }), + +/***/ 4297: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(5925) + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff + + +/***/ }), + +/***/ 1898: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq + + +/***/ }), + +/***/ 4123: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt + + +/***/ }), + +/***/ 5522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte + + +/***/ }), + +/***/ 900: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc + + +/***/ }), + +/***/ 194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt + + +/***/ }), + +/***/ 7520: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte + + +/***/ }), + +/***/ 6688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major + + +/***/ }), + +/***/ 8447: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor + + +/***/ }), + +/***/ 6017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq + + +/***/ }), + +/***/ 5925: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse + + +/***/ }), + +/***/ 2866: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch + + +/***/ }), + +/***/ 4016: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(5925) +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +module.exports = prerelease + + +/***/ }), + +/***/ 6417: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compare = __nccwpck_require__(4309) +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare + + +/***/ }), + +/***/ 8701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compareBuild = __nccwpck_require__(2156) +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort + + +/***/ }), + +/***/ 6055: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const Range = __nccwpck_require__(9828) +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies + + +/***/ }), + +/***/ 1426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const compareBuild = __nccwpck_require__(2156) +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort + + +/***/ }), + +/***/ 9601: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(5925) +const valid = (version, options) => { + const v = parse(version, options) + return v ? v.version : null +} +module.exports = valid + + +/***/ }), + +/***/ 1383: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// just pre-load all the stuff that index.js lazily exports +const internalRe = __nccwpck_require__(9523) +const constants = __nccwpck_require__(2293) +const SemVer = __nccwpck_require__(8088) +const identifiers = __nccwpck_require__(2463) +const parse = __nccwpck_require__(5925) +const valid = __nccwpck_require__(9601) +const clean = __nccwpck_require__(8848) +const inc = __nccwpck_require__(900) +const diff = __nccwpck_require__(4297) +const major = __nccwpck_require__(6688) +const minor = __nccwpck_require__(8447) +const patch = __nccwpck_require__(2866) +const prerelease = __nccwpck_require__(4016) +const compare = __nccwpck_require__(4309) +const rcompare = __nccwpck_require__(6417) +const compareLoose = __nccwpck_require__(2804) +const compareBuild = __nccwpck_require__(2156) +const sort = __nccwpck_require__(1426) +const rsort = __nccwpck_require__(8701) +const gt = __nccwpck_require__(4123) +const lt = __nccwpck_require__(194) +const eq = __nccwpck_require__(1898) +const neq = __nccwpck_require__(6017) +const gte = __nccwpck_require__(5522) +const lte = __nccwpck_require__(7520) +const cmp = __nccwpck_require__(5098) +const coerce = __nccwpck_require__(3466) +const Comparator = __nccwpck_require__(1532) +const Range = __nccwpck_require__(9828) +const satisfies = __nccwpck_require__(6055) +const toComparators = __nccwpck_require__(2706) +const maxSatisfying = __nccwpck_require__(579) +const minSatisfying = __nccwpck_require__(832) +const minVersion = __nccwpck_require__(4179) +const validRange = __nccwpck_require__(2098) +const outside = __nccwpck_require__(420) +const gtr = __nccwpck_require__(9380) +const ltr = __nccwpck_require__(3323) +const intersects = __nccwpck_require__(7008) +const simplifyRange = __nccwpck_require__(5297) +const subset = __nccwpck_require__(7863) +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + 
Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} + + +/***/ }), + +/***/ 2293: +/***/ ((module) => { + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} + + +/***/ }), + +/***/ 427: +/***/ ((module) => { + +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug + + +/***/ }), + +/***/ 2463: +/***/ ((module) => { + +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} + + +/***/ }), + +/***/ 785: +/***/ ((module) => { + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions + + +/***/ }), + +/***/ 9523: +/***/ ((module, exports, __nccwpck_require__) => { + +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = __nccwpck_require__(2293) +const debug = __nccwpck_require__(427) +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". 
+// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCE', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') + + +/***/ }), + +/***/ 9380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Determine if version is greater than all the versions possible in the range. 
+const outside = __nccwpck_require__(420) +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr + + +/***/ }), + +/***/ 7008: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const Range = __nccwpck_require__(9828) +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects + + +/***/ }), + +/***/ 3323: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const outside = __nccwpck_require__(420) +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr + + +/***/ }), + +/***/ 579: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const Range = __nccwpck_require__(9828) + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying + + +/***/ }), + +/***/ 832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const Range = __nccwpck_require__(9828) +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying + + +/***/ }), + +/***/ 4179: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const Range = __nccwpck_require__(9828) +const gt = __nccwpck_require__(4123) + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
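+      // Summary of the switch below (no additional logic): for '>' the smallest
+      // admissible version is the next one up, so bump the patch for a release
+      // (>1.2.3 -> 1.2.4) or append a 0 prerelease identifier
+      // (>1.2.3-alpha -> 1.2.3-alpha.0); '>=' and '' accept the comparator's
+      // version as-is, and upper bounds ('<', '<=') are ignored when looking
+      // for a minimum.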
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion + + +/***/ }), + +/***/ 420: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(8088) +const Comparator = __nccwpck_require__(1532) +const { ANY } = Comparator +const Range = __nccwpck_require__(9828) +const satisfies = __nccwpck_require__(6055) +const gt = __nccwpck_require__(4123) +const lt = __nccwpck_require__(194) +const lte = __nccwpck_require__(7520) +const gte = __nccwpck_require__(5522) + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside + + +/***/ }), + +/***/ 5297: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
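+// Illustrative example (assuming the listed versions are all that exist):
+// simplifying '>=1.0.0 <2.0.0' against ['1.0.0', '1.0.1', '1.1.0'] yields '*',
+// because every listed version is matched and '*' is the shorter spelling.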
+const satisfies = __nccwpck_require__(6055) +const compare = __nccwpck_require__(4309) +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? simplified : range +} + + +/***/ }), + +/***/ 7863: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const Range = __nccwpck_require__(9828) +const Comparator = __nccwpck_require__(1532) +const { ANY } = Comparator +const satisfies = __nccwpck_require__(6055) +const compare = __nccwpck_require__(4309) + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple 
ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? 
gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset + + +/***/ }), + +/***/ 2706: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const Range = __nccwpck_require__(9828) + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators + + +/***/ }), + +/***/ 2098: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const Range = __nccwpck_require__(9828) +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
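+    // (e.g. a bare '*' range normalizes to the empty string, so validRange('*')
+    // returns '*', while validRange('not a range') returns null)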
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange + + +/***/ }), + +/***/ 5824: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +let events = __nccwpck_require__(2361) +let fs = __nccwpck_require__(7147) +let path = __nccwpck_require__(1017) + +// const environment = process.env['NODE_ENV'] || 'development' + +class devNull { + info() { }; + error() { }; +}; + +class Tail extends events.EventEmitter { + + constructor(filename, options = {}) { + super(); + this.filename = filename; + this.absPath = path.dirname(this.filename); + this.separator = (options.separator !== undefined) ? options.separator : /[\r]{0,1}\n/;// null is a valid param + this.fsWatchOptions = options.fsWatchOptions || {}; + this.follow = options['follow'] != undefined ? options['follow'] : true; + this.logger = options.logger || new devNull(); + this.useWatchFile = options.useWatchFile || false; + this.flushAtEOF = options.flushAtEOF || false; + this.encoding = options.encoding || 'utf-8'; + const fromBeginning = options.fromBeginning || false; + this.nLines = options.nLines || undefined; + + this.logger.info(`Tail starting...`) + this.logger.info(`filename: ${this.filename}`); + this.logger.info(`encoding: ${this.encoding}`); + + try { + fs.accessSync(this.filename, fs.constants.F_OK); + } catch (err) { + if (err.code == 'ENOENT') { + throw err + } + } + + this.buffer = ''; + this.internalDispatcher = new events.EventEmitter(); + this.queue = []; + this.isWatching = false; + this.pos = 0; + + // this.internalDispatcher.on('next',this.readBlock); + this.internalDispatcher.on('next', () => { + this.readBlock(); + }); + + let cursor; + + this.logger.info(`fromBeginning: ${fromBeginning}`); + if (fromBeginning) { + cursor = 0; + } else if (this.nLines <= 0) { + cursor = 0; + } else if (this.nLines !== undefined) { + cursor = this.getPositionAtNthLine(this.nLines); + } else { + cursor = this.latestPosition(); + } + + if (cursor === undefined) throw new Error("Tail can't initialize."); + + const flush = fromBeginning || (this.nLines != undefined); + try { + this.watch(cursor, flush); + } catch (err) { + this.logger.error(`watch for ${this.filename} failed: ${err}`); + this.emit("error", `watch for ${this.filename} failed: ${err}`); + } + } + + /** + * Grabs the index of the last line of text in the format /.*(\n)?/. + * Returns null if a full line can not be found. 
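+     * For illustration, with the default separator: getIndexOfLastLine("a\nb\n")
+     * returns 2 (the start of the last complete line "b\n"), and
+     * getIndexOfLastLine("partial") returns null since no separator is present.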
+ * @param {string} text + * @returns {number | null} + */ + getIndexOfLastLine(text) { + + /** + * Helper function get the last match as string + * @param {string} haystack + * @param {string | RegExp} needle + * @returns {string | undefined} + */ + const getLastMatch = (haystack, needle) => { + const matches = haystack.match(needle); + if (matches === null) { + return; + } + + return matches[matches.length - 1]; + }; + + const endSep = getLastMatch(text, this.separator); + + if (!endSep) return null; + + const endSepIndex = text.lastIndexOf(endSep); + let lastLine; + + if (text.endsWith(endSep)) { + // If the text ends with a separator, look back further to find the next + // separator to complete the line + + const trimmed = text.substring(0, endSepIndex); + const startSep = getLastMatch(trimmed, this.separator); + + // If there isn't another separator, the line isn't complete so + // so return null to get more data + + if (!startSep) { + return null; + } + + const startSepIndex = trimmed.lastIndexOf(startSep); + + // Exclude the starting separator, include the ending separator + + lastLine = text.substring( + startSepIndex + startSep.length, + endSepIndex + endSep.length + ); + } else { + // If the text does not end with a separator, grab everything after + // the last separator + lastLine = text.substring(endSepIndex + endSep.length); + } + + return text.lastIndexOf(lastLine); + } + + /** + * Returns the position of the start of the `nLines`th line from the bottom. + * Returns 0 if `nLines` is greater than the total number of lines in the file. + * @param {number} nLines + * @returns {number} + */ + getPositionAtNthLine(nLines) { + const { size } = fs.statSync(this.filename); + + if (size === 0) { + return 0; + } + + const fd = fs.openSync(this.filename, 'r'); + // Start from the end of the file and work backwards in specific chunks + let currentReadPosition = size; + const chunkSizeBytes = Math.min(1024, size); + const lineBytes = []; + + let remaining = ''; + + while (lineBytes.length < nLines) { + // Shift the current read position backward to the amount we're about to read + currentReadPosition -= chunkSizeBytes; + + // If negative, we've reached the beginning of the file and we should stop and return 0, starting the + // stream at the beginning. + if (currentReadPosition < 0) { + return 0; + } + + // Read a chunk of the file and prepend it to the working buffer + const buffer = Buffer.alloc(chunkSizeBytes); + const bytesRead = fs.readSync(fd, buffer, + 0, // position in buffer to write to + chunkSizeBytes, // number of bytes to read + currentReadPosition // position in file to read from + ); + + // .subarray returns Uint8Array in node versions < 16.x and Buffer + // in versions >= 16.x. 
To support both, allocate a new buffer with + // Buffer.from which accepts both types + const readArray = buffer.subarray(0, bytesRead); + remaining = Buffer.from(readArray).toString(this.encoding) + remaining; + + let index = this.getIndexOfLastLine(remaining); + + while (index !== null && lineBytes.length < nLines) { + const line = remaining.substring(index); + + lineBytes.push(Buffer.byteLength(line)); + remaining = remaining.substring(0, index); + + index = this.getIndexOfLastLine(remaining); + } + } + + fs.closeSync(fd); + + return size - lineBytes.reduce((acc, cur) => acc + cur, 0) + } + + latestPosition() { + try { + return fs.statSync(this.filename).size; + } catch (err) { + this.logger.error(`size check for ${this.filename} failed: ${err}`); + this.emit("error", `size check for ${this.filename} failed: ${err}`); + throw err; + } + } + + readBlock() { + if (this.queue.length >= 1) { + const block = this.queue[0]; + if (block.end > block.start) { + let stream = fs.createReadStream(this.filename, { start: block.start, end: block.end - 1, encoding: this.encoding }); + stream.on('error', (error) => { + this.logger.error(`Tail error: ${error}`); + this.emit('error', error); + }); + stream.on('end', () => { + let _ = this.queue.shift(); + if (this.queue.length > 0) { + this.internalDispatcher.emit('next'); + } + if (this.flushAtEOF && this.buffer.length > 0) { + this.emit('line', this.buffer); + this.buffer = ""; + } + }); + stream.on('data', (d) => { + if (this.separator === null) { + this.emit("line", d); + } else { + this.buffer += d; + let parts = this.buffer.split(this.separator); + this.buffer = parts.pop(); + for (const chunk of parts) { + this.emit("line", chunk); + } + } + }); + } + } + } + + change() { + let p = this.latestPosition() + if (p < this.currentCursorPos) {//scenario where text is not appended but it's actually a w+ + this.currentCursorPos = p + } else if (p > this.currentCursorPos) { + this.queue.push({ start: this.currentCursorPos, end: p }); + this.currentCursorPos = p + if (this.queue.length == 1) { + this.internalDispatcher.emit("next"); + } + } + } + + watch(startingCursor, flush) { + if (this.isWatching) return; + this.logger.info(`filesystem.watch present? ${fs.watch != undefined}`); + this.logger.info(`useWatchFile: ${this.useWatchFile}`); + + this.isWatching = true; + this.currentCursorPos = startingCursor; + //force a file flush is either fromBegining or nLines flags were passed. + if (flush) this.change(); + + if (!this.useWatchFile && fs.watch) { + this.logger.info(`watch strategy: watch`); + this.watcher = fs.watch(this.filename, this.fsWatchOptions, (e, filename) => { this.watchEvent(e, filename); }); + } else { + this.logger.info(`watch strategy: watchFile`); + fs.watchFile(this.filename, this.fsWatchOptions, (curr, prev) => { this.watchFileEvent(curr, prev) }); + } + } + + rename(filename) { + //TODO + //MacOS sometimes throws a rename event for no reason. + //Different platforms might behave differently. + //see https://nodejs.org/api/fs.html#fs_fs_watch_filename_options_listener + //filename might not be present. + //https://nodejs.org/api/fs.html#fs_filename_argument + //Better solution would be check inode but it will require a timeout and + // a sync file read. 
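+        // In short: when the event names a different file (or no file at all),
+        // the old watcher is dropped and, if `follow` is enabled, the new path is
+        // re-watched after a 1s delay; otherwise an error is emitted. A rename
+        // event that reports the same filename is ignored.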
+ if (filename === undefined || filename !== this.filename) { + this.unwatch(); + if (this.follow) { + this.filename = path.join(this.absPath, filename); + this.rewatchId = setTimeout((() => { + try { + this.watch(this.currentCursorPos); + } catch (ex) { + this.logger.error(`'rename' event for ${this.filename}. File not available anymore.`); + this.emit("error", ex); + } + }), 1000); + } else { + this.logger.error(`'rename' event for ${this.filename}. File not available anymore.`); + this.emit("error", `'rename' event for ${this.filename}. File not available anymore.`); + } + } else { + // this.logger.info("rename event but same filename") + } + } + + watchEvent(e, evtFilename) { + try { + if (e === 'change') { + this.change(); + } else if (e === 'rename') { + this.rename(evtFilename); + } + } catch (err) { + this.logger.error(`watchEvent for ${this.filename} failed: ${err}`); + this.emit("error", `watchEvent for ${this.filename} failed: ${err}`); + } + } + + watchFileEvent(curr, prev) { + if (curr.size > prev.size) { + this.currentCursorPos = curr.size; //Update this.currentCursorPos so that a consumer can determine if entire file has been handled + this.queue.push({ start: prev.size, end: curr.size }); + if (this.queue.length == 1) { + this.internalDispatcher.emit("next"); + } + } + } + + unwatch() { + if (this.watcher) { + this.watcher.close(); + } else { + fs.unwatchFile(this.filename); + } + if (this.rewatchId) { + clearTimeout(this.rewatchId); + this.rewatchId = undefined; + } + this.isWatching = false; + this.queue = [];// TODO: is this correct behaviour? + if (this.logger) { + this.logger.info(`Unwatch ${this.filename}`); + } + } + +} + +exports.Tail = Tail + + /***/ }), /***/ 4294: @@ -3922,153 +7092,603 @@ function validate(uuid) { return typeof uuid === 'string' && _regex.default.test(uuid); } -var _default = validate; -exports["default"] = _default; +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 6143: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { isexe, sync: isexeSync } = __nccwpck_require__(5200) +const { join, delimiter, sep, posix } = __nccwpck_require__(1017) + +const isWindows = process.platform === 'win32' + +// used to check for slashed in commands passed in. always checks for the posix +// seperator on all platforms, and checks for the current separator when not on +// a posix platform. don't use the isWindows check for this since that is mocked +// in tests but we still need the code to actually work when called. that is also +// why it is ignored from coverage. +/* istanbul ignore next */ +const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? 
'' : sep}]`.replace(/(\\)/g, '\\$1')) +const rRel = new RegExp(`^\\.${rSlash.source}`) + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, { + path: optPath = process.env.PATH, + pathExt: optPathExt = process.env.PATHEXT, + delimiter: optDelimiter = delimiter, +}) => { + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(rSlash) ? [''] : [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter), + ] + + if (isWindows) { + const pathExtExe = optPathExt || + ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter) + const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()]) + if (cmd.includes('.') && pathExt[0] !== '') { + pathExt.unshift('') + } + return { pathEnv, pathExt, pathExtExe } + } + + return { pathEnv, pathExt: [''] } +} + +const getPathPart = (raw, cmd) => { + const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw + const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : '' + return prefix + join(pathPart, cmd) +} + +const which = async (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const envPart of pathEnv) { + const p = getPathPart(envPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +const whichSync = (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const pathEnvPart of pathEnv) { + const p = getPathPart(pathEnvPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync + + +/***/ }), + +/***/ 4091: +/***/ ((module) => { + +"use strict"; + +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} + + +/***/ }), + +/***/ 665: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) 
{ + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} -/***/ }), +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } -/***/ 1595: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} -"use strict"; +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev } + if (i === n && walker !== null) { + return walker.value + } +} - return parseInt(uuid.substr(14, 1), 16); +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res } -var _default = version; -exports["default"] = _default; +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} -/***/ }), +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } -/***/ 6143: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } -const { isexe, sync: isexeSync } = __nccwpck_require__(5200) -const { join, delimiter, sep, posix } = __nccwpck_require__(1017) + return acc +} -const isWindows = process.platform === 'win32' +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } -// used to check for slashed in commands passed in. always checks for the posix -// seperator on all platforms, and checks for the current separator when not on -// a posix platform. don't use the isWindows check for this since that is mocked -// in tests but we still need the code to actually work when called. that is also -// why it is ignored from coverage. -/* istanbul ignore next */ -const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1')) -const rRel = new RegExp(`^\\.${rSlash.source}`) + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } -const getNotFoundError = (cmd) => - Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + return acc +} -const getPathInfo = (cmd, { - path: optPath = process.env.PATH, - pathExt: optPathExt = process.env.PATHEXT, - delimiter: optDelimiter = delimiter, -}) => { - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - const pathEnv = cmd.match(rSlash) ? [''] : [ - // windows always checks the cwd first - ...(isWindows ? 
[process.cwd()] : []), - ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter), - ] +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} - if (isWindows) { - const pathExtExe = optPathExt || - ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter) - const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()]) - if (cmd.includes('.') && pathExt[0] !== '') { - pathExt.unshift('') - } - return { pathEnv, pathExt, pathExtExe } +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev } + return arr +} - return { pathEnv, pathExt: [''] } +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret } -const getPathPart = (raw, cmd) => { - const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw - const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : '' - return prefix + join(pathPart, cmd) +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret } -const which = async (cmd, opt = {}) => { - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } - for (const envPart of pathEnv) { - const p = getPathPart(envPart, cmd) + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } - for (const ext of pathExt) { - const withExt = p + ext - const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true }) - if (is) { - if (!opt.all) { - return withExt - } - found.push(withExt) - } - } + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + walker = this.tail } - if (opt.all && found.length) { - return found + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev } - if (opt.nothrow) { - return null + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) } + return ret; +} - throw getNotFoundError(cmd) +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + 
walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this } -const whichSync = (cmd, opt = {}) => { - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] +function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self) - for (const pathEnvPart of pathEnv) { - const p = getPathPart(pathEnvPart, cmd) + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } - for (const ext of pathExt) { - const withExt = p + ext - const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true }) - if (is) { - if (!opt.all) { - return withExt - } - found.push(withExt) - } - } + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail } + self.length++ +} - if (opt.all && found.length) { - return found +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) } - if (opt.nothrow) { - return null + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null } - throw getNotFoundError(cmd) + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } } -module.exports = which -which.sync = whichSync +try { + // add if support for Symbol.iterator is present + __nccwpck_require__(4091)(Yallist) +} catch (er) {} /***/ }), @@ -4105,11 +7725,16 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IsPost = void 0; const core = __importStar(__nccwpck_require__(2186)); const exec = __importStar(__nccwpck_require__(1514)); +const node_child_process_1 = __nccwpck_require__(7718); +const fs = __importStar(__nccwpck_require__(3977)); +const node_fs_1 = __nccwpck_require__(7561); +const os = __importStar(__nccwpck_require__(612)); +const path = __importStar(__nccwpck_require__(9411)); +const tail_1 = __nccwpck_require__(5824); const which_1 = __importDefault(__nccwpck_require__(6143)); -exports.IsPost = !!process.env['STATE_isPost']; +const semver_1 = __importDefault(__nccwpck_require__(1383)); // inputs const name = core.getInput('name', { required: true }); const extraPullNames = core.getInput('extraPullNames'); @@ -4119,29 +7744,50 @@ const skipPush = core.getInput('skipPush'); const pathsToPush = core.getInput('pathsToPush'); const pushFilter = core.getInput('pushFilter'); const cachixArgs = core.getInput('cachixArgs'); +const skipAddingSubstituter = core.getInput('skipAddingSubstituter'); +const useDaemon = (core.getInput('useDaemon') === 'true') ? 
true : false; +const cachixBinInput = core.getInput('cachixBin'); const installCommand = core.getInput('installCommand') || "nix-env --quiet -j8 -iA cachix -f https://cachix.org/api/v1/install"; -const skipAddingSubstituter = core.getInput('skipAddingSubstituter'); +const ENV_CACHIX_DAEMON_DIR = 'CACHIX_DAEMON_DIR'; async function setup() { try { - if (!which_1.default.sync('cachix', { nothrow: true })) { - core.startGroup('Cachix: installing'); - await exec.exec('bash', ['-c', installCommand]); - core.endGroup(); + let cachixBin = cachixBinInput; + if (cachixBin !== "") { + core.debug(`Using Cachix executable from input: ${cachixBin}`); + } + else { + // Find the Cachix executable in PATH + let resolvedCachixBin = which_1.default.sync('cachix', { nothrow: true }); + if (resolvedCachixBin) { + core.debug(`Found Cachix executable: ${cachixBin}`); + cachixBin = resolvedCachixBin; + } + else { + core.startGroup('Cachix: installing'); + await exec.exec('bash', ['-c', installCommand]); + cachixBin = which_1.default.sync('cachix'); + core.debug(`Installed Cachix executable: ${cachixBin}`); + core.endGroup(); + } } + core.saveState('cachixBin', cachixBin); + // Print the executable version. + // Also verifies that the binary exists and is executable. core.startGroup('Cachix: checking version'); - await exec.exec('cachix', ['--version']); + let cachixVersion = await execToVariable(cachixBin, ['--version']) + .then((res) => semver_1.default.coerce(res.split(" ")[1])); core.endGroup(); // for managed signing key and private caches if (authToken !== "") { - await exec.exec('cachix', ['authtoken', authToken]); + await exec.exec(cachixBin, ['authtoken', authToken]); } if (skipAddingSubstituter === 'true') { core.info('Not adding Cachix cache to substituters as skipAddingSubstituter is set to true'); } else { core.startGroup(`Cachix: using cache ` + name); - await exec.exec('cachix', ['use', name]); + await exec.exec(cachixBin, ['use', name]); core.endGroup(); } if (extraPullNames != "") { @@ -4149,15 +7795,61 @@ async function setup() { const extraPullNameList = extraPullNames.split(','); for (let itemName of extraPullNameList) { const trimmedItemName = itemName.trim(); - await exec.exec('cachix', ['use', trimmedItemName]); + await exec.exec(cachixBin, ['use', trimmedItemName]); } core.endGroup(); } if (signingKey !== "") { core.exportVariable('CACHIX_SIGNING_KEY', signingKey); } - // Remember existing store paths - await exec.exec("sh", ["-c", `${__dirname}/list-nix-store.sh > /tmp/store-path-pre-build`]); + let supportsDaemonInterface = (cachixVersion) ? semver_1.default.gte(cachixVersion, '1.7.0') : false; + let supportsPostBuildHook = await isTrustedUser(); + let hasPushCredentials = signingKey !== "" || authToken !== ""; + if (useDaemon && !supportsDaemonInterface) { + core.warning(`Cachix Daemon is not supported by this version of Cachix (${cachixVersion}). Ignoring the 'useDaemon' option.`); + } + if (useDaemon && !supportsPostBuildHook) { + core.warning("This user is not allowed to set the post-build-hook. Ignoring the 'useDaemon' option."); + } + if (useDaemon && !hasPushCredentials) { + core.warning("No push credentials found. Ignoring the 'useDaemon' option."); + } + let supportsDaemon = supportsDaemonInterface && supportsPostBuildHook && hasPushCredentials; + core.saveState('supportsDaemon', supportsDaemon); + if (useDaemon && supportsDaemon) { + const tmpdir = process.env['RUNNER_TEMP'] ?? 
os.tmpdir(); + const daemonDir = await fs.mkdtemp(path.join(tmpdir, 'cachix-daemon-')); + const daemonLog = (0, node_fs_1.openSync)(`${daemonDir}/daemon.log`, 'a'); + const daemon = (0, node_child_process_1.spawn)(cachixBin, [ + 'daemon', 'run', + '--socket', `${daemonDir}/daemon.sock`, + name, + ], { + stdio: ['ignore', daemonLog, daemonLog], + detached: true, + }); + daemon.on('error', (err) => { + core.error(`Cachix Daemon failed: ${err}`); + }); + if (typeof daemon.pid === 'number') { + const pid = daemon.pid.toString(); + core.debug(`Spawned Cachix Daemon with PID: ${pid}`); + await fs.writeFile(pidFilePath(daemonDir), pid); + } + else { + core.error('Failed to spawn Cachix Daemon'); + return; + } + await registerPostBuildHook(cachixBin, daemonDir); + // Expose the daemon directory for the post action hook + core.exportVariable(ENV_CACHIX_DAEMON_DIR, daemonDir); + // Detach the daemon process from the current process + daemon.unref(); + } + else { + // Remember existing store paths + await exec.exec("sh", ["-c", `${__dirname}/list-nix-store.sh > /tmp/store-path-pre-build`]); + } } catch (error) { core.setFailed(`Action failed with error: ${error}`); @@ -4165,15 +7857,43 @@ async function setup() { } async function upload() { core.startGroup('Cachix: push'); + const cachixBin = core.getState('cachixBin'); + const supportsDaemon = core.getState('supportsDaemon'); try { if (skipPush === 'true') { core.info('Pushing is disabled as skipPush is set to true'); } else if (signingKey !== "" || authToken !== "") { - await exec.exec(`${__dirname}/push-paths.sh`, ['cachix', cachixArgs, name, pathsToPush, pushFilter]); + if (useDaemon && supportsDaemon) { + const daemonDir = process.env[ENV_CACHIX_DAEMON_DIR]; + if (!daemonDir) { + core.debug('Cachix Daemon not started. Skipping push'); + return; + } + const daemonPid = parseInt(await fs.readFile(pidFilePath(daemonDir), { encoding: 'utf8' })); + if (!daemonPid) { + core.error('Failed to find PID of Cachix Daemon. Skipping push.'); + return; + } + core.debug(`Found Cachix daemon with pid ${daemonPid}`); + let daemonLog = new tail_1.Tail(`${daemonDir}/daemon.log`, { fromBeginning: true }); + daemonLog.on('line', (line) => core.info(line)); + try { + core.debug('Waiting for Cachix daemon to exit...'); + await exec.exec(cachixBin, ["daemon", "stop", "--socket", `${daemonDir}/daemon.sock`]); + } + finally { + // Wait a bit for the logs to flush through + await new Promise((resolve) => setTimeout(resolve, 1000)); + daemonLog.unwatch(); + } + } + else { + await exec.exec(`${__dirname}/push-paths.sh`, [cachixBin, cachixArgs, name, pathsToPush, pushFilter]); + } } else { - core.info('Pushing is disabled as signingKey nor authToken are set (or are empty?) in your YAML file.'); + core.info('Pushing is disabled because neither signingKey nor authToken are set (or are empty?) in your YAML file.'); } } catch (error) { @@ -4181,12 +7901,146 @@ async function upload() { } core.endGroup(); } +function pidFilePath(daemonDir) { + return path.join(daemonDir, 'daemon.pid'); +} +// Exec a command and return the stdout as a string. +async function execToVariable(command, args, options) { + let res = ''; + options = options ?? {}; + options['listeners'] = { + stdout: (data) => { + res += data.toString(); + }, + }; + return exec.exec(command, args, options).then(() => res); +} +// Register the post-build-hook +// From the nix.conf manual: +// +// If NIX_USER_CONF_FILES is set, then each path separated by : will be loaded in reverse order. 
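+// (presumably so that the first file listed takes precedence; hence the code
+// below prepends its own nix.conf to NIX_USER_CONF_FILES)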
+// +// Otherwise it will look for nix / nix.conf files in XDG_CONFIG_DIRS and XDG_CONFIG_HOME.If +// unset, XDG_CONFIG_DIRS defaults to / etc / xdg, and XDG_CONFIG_HOME defaults to $HOME /.config +// as per XDG Base Directory Specification. +// +// The system nix.conf ($NIX_CONF_DIR/nix.conf) is always loaded first. +// +// If the user has overridden the default nix.conf locations with NIX_USER_CONF_DIR, we reuse it and prepend out own config. +// If the user has set NIX_CONF, we append our config to it. +async function registerPostBuildHook(cachixBin, daemonDir) { + const postBuildHookScriptPath = `${daemonDir}/post-build-hook.sh`; + await fs.writeFile(postBuildHookScriptPath, ` + #!/bin/sh + + set -eu + set -f # disable globbing + + exec ${cachixBin} daemon push \ + --socket ${daemonDir}/daemon.sock \ + $OUT_PATHS + `, + // Make the post-build-hook executable + { mode: 0o755 }); + core.debug(`Wrote post-build-hook script to ${postBuildHookScriptPath}`); + const postBuildHookConfigPath = `${daemonDir}/nix.conf`; + await fs.writeFile(postBuildHookConfigPath, `post-build-hook = ${postBuildHookScriptPath}`); + core.debug(`Wrote post-build-hook nix config to ${postBuildHookConfigPath}`); + const existingNixConf = process.env['NIX_CONF']; + if (existingNixConf) { + core.exportVariable('NIX_CONF', `${existingNixConf}\npost-build-hook = ${postBuildHookScriptPath}`); + core.debug('Registered post-build-hook in NIX_CONF'); + } + else { + const existingUserConfEnv = process.env['NIX_USER_CONF_FILES'] ?? ''; + let nixUserConfFilesEnv = ''; + if (existingUserConfEnv) { + nixUserConfFilesEnv = postBuildHookConfigPath + ':' + existingUserConfEnv; + } + else { + const userConfigFiles = getUserConfigFiles(); + nixUserConfFilesEnv = [postBuildHookConfigPath, ...userConfigFiles].filter((x) => x !== '').join(':'); + } + core.exportVariable('NIX_USER_CONF_FILES', nixUserConfFilesEnv); + core.debug(`Registered post-build-hook in NIX_USER_CONF_FILES=${process.env['NIX_USER_CONF_FILES']}`); + } +} +// Get the paths to the user config files. +function getUserConfigFiles() { + const userConfigDirs = getUserConfigDirs(); + return userConfigDirs.map((dir) => `${dir}/nix/nix.conf`); +} +// Get the user config directories. +function getUserConfigDirs() { + const xdgConfigHome = process.env['XDG_CONFIG_HOME'] ?? `${os.homedir()}/.config`; + const xdgConfigDirs = (process.env['XDG_CONFIG_DIRS'] ?? '/etc/xdg').split(':'); + return [xdgConfigHome, ...xdgConfigDirs]; +} +async function isTrustedUser() { + try { + let user = os.userInfo().username; + core.debug(`Checking if user ${user} is trusted`); + let userGroups = await execToVariable('id', ['-Gn', user], { silent: true }).then((str) => str.trim().split(' ')); + core.debug(`User ${user} is in groups ${userGroups}`); + let [trustedUsers, trustedGroups] = await fetchTrustedUsers().then(partitionUsersAndGroups); + core.debug(`Trusted users: ${trustedUsers}`); + core.debug(`Trusted groups: ${trustedGroups}`); + // Chech if Nix is installed in single-user mode. + let isStoreWritable = await isWritable('/nix/store'); + core.debug(`Is store writable: ${isStoreWritable}`); + let isTrustedUser = isStoreWritable + || trustedUsers.includes(user) + || trustedGroups.some((group) => userGroups.includes(group)); + core.debug(`User ${user} is trusted: ${isTrustedUser}`); + return isTrustedUser; + } + catch (err) { + core.warning('Failed to determine if the user is trusted. 
Assuming untrusted user.'); + core.debug(`error: ${err}`); + return false; + } +} +async function isWritable(path) { + try { + await fs.access(path, fs.constants.W_OK); + return true; + } + catch (error) { + return false; + } +} +async function fetchTrustedUsers() { + try { + let conf = await execToVariable('nix', ['show-config'], { silent: true }); + let match = conf.match(/trusted-users = (.+)/m); + return match?.length === 2 ? match[1].split(' ') : []; + } + catch (error) { + core.warning('Failed to read the Nix configuration'); + return []; + } +} +function partitionUsersAndGroups(mixedUsers) { + let users = []; + let groups = []; + mixedUsers.forEach((item) => { + if (item.startsWith('@')) { + groups.push(item.slice(1)); + } + else { + users.push(item); + } + }); + return [users, groups]; +} +const isPost = !!core.getState('isPost'); // Main -if (!exports.IsPost) { +if (!isPost) { // Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic. // This is necessary since we don't have a separate entry point. core.saveState('isPost', 'true'); setup(); + core.debug('Setup done'); } else { // Post @@ -4268,6 +8122,46 @@ module.exports = require("net"); /***/ }), +/***/ 7718: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:child_process"); + +/***/ }), + +/***/ 7561: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:fs"); + +/***/ }), + +/***/ 3977: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:fs/promises"); + +/***/ }), + +/***/ 612: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:os"); + +/***/ }), + +/***/ 9411: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:path"); + +/***/ }), + /***/ 2037: /***/ ((module) => { diff --git a/package.json b/package.json index abd850d6..989163e3 100644 --- a/package.json +++ b/package.json @@ -22,11 +22,15 @@ "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", + "semver": "^7.5.4", + "tail": "^2.2.6", "which": "^4.0.0" }, "devDependencies": { "@types/jest": "^29.5.3", "@types/node": "^20.5.0", + "@types/semver": "^7.5.4", + "@types/tail": "^2.2.1", "@types/which": "^3.0.0", "@vercel/ncc": "^0.36.1", "jest": "^29.6.2", diff --git a/src/main.ts b/src/main.ts index 757f1ef4..5b6c22fa 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,8 +1,13 @@ import * as core from '@actions/core'; import * as exec from '@actions/exec'; +import { spawn } from 'node:child_process'; +import * as fs from 'node:fs/promises'; +import { openSync } from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { Tail } from 'tail'; import which from 'which'; - -export const IsPost = !!process.env['STATE_isPost'] +import semver from 'semver'; // inputs const name = core.getInput('name', { required: true }); @@ -13,33 +18,57 @@ const skipPush = core.getInput('skipPush'); const pathsToPush = core.getInput('pathsToPush'); const pushFilter = core.getInput('pushFilter'); const cachixArgs = core.getInput('cachixArgs'); +const skipAddingSubstituter = core.getInput('skipAddingSubstituter'); +const useDaemon = (core.getInput('useDaemon') === 'true') ? 
true : false;
+const cachixBinInput = core.getInput('cachixBin');
 const installCommand = core.getInput('installCommand') ||
   "nix-env --quiet -j8 -iA cachix -f https://cachix.org/api/v1/install";
-const skipAddingSubstituter = core.getInput('skipAddingSubstituter');
+
+const ENV_CACHIX_DAEMON_DIR = 'CACHIX_DAEMON_DIR';
 
 async function setup() {
   try {
-    if(!which.sync('cachix', { nothrow: true })) {
-      core.startGroup('Cachix: installing')
-      await exec.exec('bash', ['-c', installCommand]);
-      core.endGroup()
+    let cachixBin = cachixBinInput;
+
+    if (cachixBin !== "") {
+      core.debug(`Using Cachix executable from input: ${cachixBin}`);
+    } else {
+      // Find the Cachix executable in PATH
+      let resolvedCachixBin = which.sync('cachix', { nothrow: true });
+
+      if (resolvedCachixBin) {
+        core.debug(`Found Cachix executable: ${resolvedCachixBin}`);
+        cachixBin = resolvedCachixBin;
+      } else {
+        core.startGroup('Cachix: installing')
+        await exec.exec('bash', ['-c', installCommand]);
+        cachixBin = which.sync('cachix');
+        core.debug(`Installed Cachix executable: ${cachixBin}`);
+        core.endGroup()
+      }
     }
 
+    core.saveState('cachixBin', cachixBin);
+
+    // Print the executable version.
+    // Also verifies that the binary exists and is executable.
     core.startGroup('Cachix: checking version')
-    await exec.exec('cachix', ['--version']);
+    let cachixVersion =
+      await execToVariable(cachixBin, ['--version'])
+        .then((res) => semver.coerce(res.split(" ")[1]));
     core.endGroup()
 
     // for managed signing key and private caches
     if (authToken !== "") {
-      await exec.exec('cachix', ['authtoken', authToken]);
+      await exec.exec(cachixBin, ['authtoken', authToken]);
     }
 
     if (skipAddingSubstituter === 'true') {
       core.info('Not adding Cachix cache to substituters as skipAddingSubstituter is set to true')
     } else {
       core.startGroup(`Cachix: using cache ` + name);
-      await exec.exec('cachix', ['use', name]);
+      await exec.exec(cachixBin, ['use', name]);
       core.endGroup();
     }
 
@@ -48,7 +77,7 @@ async function setup() {
       const extraPullNameList = extraPullNames.split(',');
       for (let itemName of extraPullNameList) {
         const trimmedItemName = itemName.trim();
-        await exec.exec('cachix', ['use', trimmedItemName]);
+        await exec.exec(cachixBin, ['use', trimmedItemName]);
       }
       core.endGroup();
     }
@@ -56,8 +85,65 @@ async function setup() {
     if (signingKey !== "") {
       core.exportVariable('CACHIX_SIGNING_KEY', signingKey);
     }
-    // Remember existing store paths
-    await exec.exec("sh", ["-c", `${__dirname}/list-nix-store.sh > /tmp/store-path-pre-build`]);
+
+    let supportsDaemonInterface = (cachixVersion) ? semver.gte(cachixVersion, '1.7.0') : false;
+    let supportsPostBuildHook = await isTrustedUser();
+    let hasPushCredentials = signingKey !== "" || authToken !== "";
+    if (useDaemon && !supportsDaemonInterface) {
+      core.warning(`Cachix Daemon is not supported by this version of Cachix (${cachixVersion}). Ignoring the 'useDaemon' option.`)
+    }
+    if (useDaemon && !supportsPostBuildHook) {
+      core.warning("This user is not allowed to set the post-build-hook. Ignoring the 'useDaemon' option.");
+    }
+    if (useDaemon && !hasPushCredentials) {
+      core.warning("No push credentials found. Ignoring the 'useDaemon' option.");
+    }
+
+    let supportsDaemon = supportsDaemonInterface && supportsPostBuildHook && hasPushCredentials;
+    core.saveState('supportsDaemon', supportsDaemon);
+
+    if (useDaemon && supportsDaemon) {
+      const tmpdir = process.env['RUNNER_TEMP'] ?? os.tmpdir();
+      const daemonDir = await fs.mkdtemp(path.join(tmpdir, 'cachix-daemon-'));
+      const daemonLog = openSync(`${daemonDir}/daemon.log`, 'a');
+
+      const daemon = spawn(
+        cachixBin,
+        [
+          'daemon', 'run',
+          '--socket', `${daemonDir}/daemon.sock`,
+          name,
+        ],
+        {
+          stdio: ['ignore', daemonLog, daemonLog],
+          detached: true,
+        }
+      );
+
+      daemon.on('error', (err) => {
+        core.error(`Cachix Daemon failed: ${err}`);
+      });
+
+      if (typeof daemon.pid === 'number') {
+        const pid = daemon.pid.toString();
+        core.debug(`Spawned Cachix Daemon with PID: ${pid}`);
+        await fs.writeFile(pidFilePath(daemonDir), pid);
+      } else {
+        core.error('Failed to spawn Cachix Daemon');
+        return;
+      }
+
+      await registerPostBuildHook(cachixBin, daemonDir);
+
+      // Expose the daemon directory for the post action hook
+      core.exportVariable(ENV_CACHIX_DAEMON_DIR, daemonDir);
+
+      // Detach the daemon process from the current process
+      daemon.unref();
+    } else {
+      // Remember existing store paths
+      await exec.exec("sh", ["-c", `${__dirname}/list-nix-store.sh > /tmp/store-path-pre-build`]);
+    }
   } catch (error) {
     core.setFailed(`Action failed with error: ${error}`);
   }
@@ -65,26 +151,222 @@ async function setup() {
 
 async function upload() {
   core.startGroup('Cachix: push');
+
+  const cachixBin = core.getState('cachixBin');
+  const supportsDaemon = core.getState('supportsDaemon') === 'true';
+
   try {
     if (skipPush === 'true') {
       core.info('Pushing is disabled as skipPush is set to true');
     } else if (signingKey !== "" || authToken !== "") {
-      await exec.exec(`${__dirname}/push-paths.sh`, ['cachix', cachixArgs, name, pathsToPush, pushFilter]);
+      if (useDaemon && supportsDaemon) {
+        const daemonDir = process.env[ENV_CACHIX_DAEMON_DIR];
+
+        if (!daemonDir) {
+          core.debug('Cachix Daemon not started. Skipping push');
+          return;
+        }
+
+        const daemonPid = parseInt(await fs.readFile(pidFilePath(daemonDir), { encoding: 'utf8' }));
+
+        if (!daemonPid) {
+          core.error('Failed to find PID of Cachix Daemon. Skipping push.');
+          return;
+        }
+
+        core.debug(`Found Cachix daemon with pid ${daemonPid}`);
+
+        let daemonLog = new Tail(`${daemonDir}/daemon.log`, { fromBeginning: true });
+        daemonLog.on('line', (line) => core.info(line));
+
+        try {
+          core.debug('Waiting for Cachix daemon to exit...');
+          await exec.exec(cachixBin, ["daemon", "stop", "--socket", `${daemonDir}/daemon.sock`]);
+        } finally {
+          // Wait a bit for the logs to flush through
+          await new Promise((resolve) => setTimeout(resolve, 1000));
+          daemonLog.unwatch();
+        }
+      } else {
+        await exec.exec(`${__dirname}/push-paths.sh`, [cachixBin, cachixArgs, name, pathsToPush, pushFilter]);
+      }
     } else {
-      core.info('Pushing is disabled as signingKey nor authToken are set (or are empty?) in your YAML file.');
+      core.info('Pushing is disabled because neither signingKey nor authToken is set (or they are empty) in your YAML file.');
     }
   } catch (error) {
     core.setFailed(`Action failed with error: ${error}`);
   }
+
   core.endGroup();
 }
 
+function pidFilePath(daemonDir: string): string {
+  return path.join(daemonDir, 'daemon.pid');
+}
+
+// Exec a command and return the stdout as a string.
+async function execToVariable(command: string, args?: string[], options?: exec.ExecOptions): Promise<string> {
+  let res = '';
+  options = options ?? {};
+
+  options['listeners'] = {
+    stdout: (data: Buffer) => {
+      res += data.toString();
+    },
+  };
+
+  return exec.exec(command, args, options).then(() => res);
+}
+
+// Register the post-build-hook
+
+// From the nix.conf manual:
+//
+// If NIX_USER_CONF_FILES is set, then each path separated by : will be loaded in reverse order.
+//
+// Otherwise it will look for nix/nix.conf files in XDG_CONFIG_DIRS and XDG_CONFIG_HOME. If
+// unset, XDG_CONFIG_DIRS defaults to /etc/xdg, and XDG_CONFIG_HOME defaults to $HOME/.config
+// as per XDG Base Directory Specification.
+//
+// The system nix.conf ($NIX_CONF_DIR/nix.conf) is always loaded first.
+//
+// If the user has overridden the default nix.conf locations with NIX_USER_CONF_FILES, we reuse it and prepend our own config.
+// If the user has set NIX_CONF, we append our config to it.
+async function registerPostBuildHook(cachixBin: string, daemonDir: string) {
+  const postBuildHookScriptPath = `${daemonDir}/post-build-hook.sh`;
+  await fs.writeFile(postBuildHookScriptPath, `
+    #!/bin/sh
+
+    set -eu
+    set -f # disable globbing
+
+    exec ${cachixBin} daemon push \
+      --socket ${daemonDir}/daemon.sock \
+      $OUT_PATHS
+    `,
+    // Make the post-build-hook executable
+    { mode: 0o755 }
+  );
+  core.debug(`Wrote post-build-hook script to ${postBuildHookScriptPath}`);
+
+  const postBuildHookConfigPath = `${daemonDir}/nix.conf`;
+  await fs.writeFile(
+    postBuildHookConfigPath,
+    `post-build-hook = ${postBuildHookScriptPath}`
+  );
+  core.debug(`Wrote post-build-hook nix config to ${postBuildHookConfigPath}`);
+
+  const existingNixConf = process.env['NIX_CONF'];
+  if (existingNixConf) {
+    core.exportVariable('NIX_CONF', `${existingNixConf}\npost-build-hook = ${postBuildHookScriptPath}`);
+    core.debug('Registered post-build-hook in NIX_CONF');
+  } else {
+    const existingUserConfEnv = process.env['NIX_USER_CONF_FILES'] ?? '';
+    let nixUserConfFilesEnv = '';
+
+    if (existingUserConfEnv) {
+      nixUserConfFilesEnv = postBuildHookConfigPath + ':' + existingUserConfEnv;
+    } else {
+      const userConfigFiles = getUserConfigFiles();
+      nixUserConfFilesEnv = [postBuildHookConfigPath, ...userConfigFiles].filter((x) => x !== '').join(':');
+    }
+
+    core.exportVariable(
+      'NIX_USER_CONF_FILES',
+      nixUserConfFilesEnv,
+    );
+    core.debug(`Registered post-build-hook in NIX_USER_CONF_FILES=${process.env['NIX_USER_CONF_FILES']}`);
+  }
+}
+
+// Get the paths to the user config files.
+function getUserConfigFiles(): string[] {
+  const userConfigDirs = getUserConfigDirs();
+  return userConfigDirs.map((dir) => `${dir}/nix/nix.conf`);
+}
+
+// Get the user config directories.
+function getUserConfigDirs(): string[] {
+  const xdgConfigHome = process.env['XDG_CONFIG_HOME'] ?? `${os.homedir()}/.config`;
+  const xdgConfigDirs = (process.env['XDG_CONFIG_DIRS'] ?? '/etc/xdg').split(':');
+  return [xdgConfigHome, ...xdgConfigDirs];
+}
+
+async function isTrustedUser(): Promise<boolean> {
+  try {
+    let user = os.userInfo().username;
+    core.debug(`Checking if user ${user} is trusted`);
+
+    let userGroups = await execToVariable('id', ['-Gn', user], { silent: true }).then((str) => str.trim().split(' '));
+    core.debug(`User ${user} is in groups ${userGroups}`);
+
+    let [trustedUsers, trustedGroups] = await fetchTrustedUsers().then(partitionUsersAndGroups);
+    core.debug(`Trusted users: ${trustedUsers}`);
+    core.debug(`Trusted groups: ${trustedGroups}`);
+
+    // Check if Nix is installed in single-user mode.
+    let isStoreWritable = await isWritable('/nix/store');
+    core.debug(`Is store writable: ${isStoreWritable}`);
+
+    let isTrustedUser =
+      isStoreWritable
+      || trustedUsers.includes(user)
+      || trustedGroups.some((group) => userGroups.includes(group));
+
+    core.debug(`User ${user} is trusted: ${isTrustedUser}`);
+
+    return isTrustedUser;
+  } catch (err) {
+    core.warning('Failed to determine if the user is trusted. Assuming untrusted user.');
+    core.debug(`error: ${err}`);
+    return false;
+  }
+}
+
+async function isWritable(path: string): Promise<boolean> {
+  try {
+    await fs.access(path, fs.constants.W_OK);
+    return true;
+  } catch (error) {
+    return false;
+  }
+}
+
+async function fetchTrustedUsers(): Promise<string[]> {
+  try {
+    let conf = await execToVariable('nix', ['show-config'], { silent: true });
+    let match = conf.match(/trusted-users = (.+)/m);
+    return match?.length === 2 ? match[1].split(' ') : [];
+  } catch (error) {
+    core.warning('Failed to read the Nix configuration');
+    return [];
+  }
+}
+
+function partitionUsersAndGroups(mixedUsers: string[]): [string[], string[]] {
+  let users: string[] = [];
+  let groups: string[] = [];
+
+  mixedUsers.forEach((item) => {
+    if (item.startsWith('@')) {
+      groups.push(item.slice(1));
+    } else {
+      users.push(item);
+    }
+  });
+
+  return [users, groups];
+}
+
+const isPost = !!core.getState('isPost');
+
 // Main
-if (!IsPost) {
+if (!isPost) {
   // Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
   // This is necessary since we don't have a separate entry point.
   core.saveState('isPost', 'true');
   setup()
+  core.debug('Setup done');
 } else {
   // Post
   upload()
diff --git a/yarn.lock b/yarn.lock
index f24a401d..19807419 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -690,11 +690,21 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-20.5.9.tgz#a70ec9d8fa0180a314c3ede0e20ea56ff71aed9a"
   integrity sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==
 
+"@types/semver@^7.5.4":
+  version "7.5.4"
+  resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.4.tgz#0a41252ad431c473158b22f9bfb9a63df7541cff"
+  integrity sha512-MMzuxN3GdFwskAnb6fz0orFvhfqi752yjaXylr0Rp4oDg5H0Zn1IuyRhDVvYOwAXoJirx2xuS16I3WjxnAIHiQ==
+
 "@types/stack-utils@^2.0.0":
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"
   integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==
 
+"@types/tail@^2.2.1":
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/@types/tail/-/tail-2.2.1.tgz#ca8d8ea542cf3fefbdac9e8f131ae04990596f1e"
+  integrity sha512-j75Gs5MiIpNR14wztQ4vtViUqxZi+lcgflyXC7P9iMgNnMab7XcV5p+2590IO3njsWWn5l8C+55ILk2CDDyaHg==
+
 "@types/which@^3.0.0":
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/@types/which/-/which-3.0.0.tgz#849afdd9fdcb0b67339b9cfc80fa6ea4e0253fc5"
@@ -2143,6 +2153,11 @@ supports-preserve-symlinks-flag@^1.0.0:
   resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
   integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
 
+tail@^2.2.6:
+  version "2.2.6"
+  resolved "https://registry.yarnpkg.com/tail/-/tail-2.2.6.tgz#24abd701963639b896c42496d5f416216ec0b558"
+  integrity sha512-IQ6G4wK/t8VBauYiGPLx+d3fA5XjSVagjWV5SIYzvEvglbQjwEcukeYI68JOPpdydjxhZ9sIgzRlSmwSpphHyw==
+
 test-exclude@^6.0.0:
version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e"