chore: update dependencies, remove unused ones (#100)

* chore: update dependencies, remove unused ones

* fix types

* Updates snapshots

* Fixes prepack

* try with Node.js 16.x

* Update more packages

* Upgrade to Jest 28

* Bump other packages

* Bump other packages

* Use Node.js 14.19.2

* Revert "try with Node.js 16.x"

This reverts commit 45aa8a3a96.
This reverts commit 4a275f68b6.

* Use specific Node.js version

* trying something

* Pin exact Node.js version number

* Bump Jest to 28.1.0

* dist-upgrade

* Don't pin Node.js version when it's not necessary

* Update more dependencies

* Update more dependencies

* Revert "Pin exact Node.js version number"

This reverts commit dd72a68061.

Co-authored-by: Maël Nison <nison.mael@gmail.com>
Antoine du Hamel authored on 2022-05-15 10:35:13 +02:00, committed by GitHub
parent 5ff6e82028
commit 6c736c3950
993 changed files with 18178 additions and 22992 deletions


@@ -14,17 +14,17 @@ jobs:
     runs-on: ${{matrix.platform[1]}}
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Install Node
-      uses: actions/setup-node@v1
+      uses: actions/setup-node@v3
       with:
         node-version: 14.x
-    - name: 'Build Node with corepack master'
+    - name: "Build Node with corepack master"
       run: |
-        yarn install --immutable
-        yarn pack
+        corepack yarn install --immutable
+        corepack yarn pack
         git clone -b mael/pmm --depth=1 https://github.com/arcanis/node.git node && cd node
         git config user.name 'John Doe'
         git config user.email 'john@example.org'
@@ -39,7 +39,7 @@ jobs:
         mv node-v15.0.0-nightlyYYYY-MM-DDXXXX-*.tar.gz node-corepack-${{matrix.platform[0]}}-x64.tar.gz
     - name: Upload build artifacts
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: node-corepack-${{matrix.platform[0]}}-x64
         path: node/node-corepack-${{matrix.platform[0]}}-x64.tar.gz


@@ -8,23 +8,23 @@ on:
 jobs:
   chore:
-    name: 'Testing chores'
+    name: "Testing chores"
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - run: |
         git fetch --no-tags --unshallow origin HEAD main
-    - uses: actions/setup-node@v2
+    - uses: actions/setup-node@v3
       with:
         node-version: 14
-    - name: 'Check that the cache files are consistent with their remote sources'
+    - name: "Check that the cache files are consistent with their remote sources"
       run: |
         if [[ $(git diff --name-only "$(git merge-base origin/"$TARGET_BRANCH" HEAD)" HEAD -- .yarn/cache | wc -l) -gt 0 ]]; then
-          yarn --immutable --immutable-cache --check-cache
+          corepack yarn --immutable --immutable-cache --check-cache
         fi
       shell: bash
       if: |
@@ -32,8 +32,8 @@ jobs:
       env:
         TARGET_BRANCH: ${{github.event.pull_request.base.ref}}
-    - name: 'Check for type errors'
-      run: yarn typecheck
+    - name: "Check for type errors"
+      run: corepack yarn typecheck

   build:
     strategy:
@@ -46,19 +46,19 @@ jobs:
         - macos-latest
         - windows-latest

-    name: '${{matrix.platform}} w/ Node.js ${{matrix.node}}.x'
+    name: "${{matrix.platform}} w/ Node.js ${{matrix.node}}.x"
     runs-on: ${{matrix.platform}}
     needs: chore

     steps:
-    - uses: actions/checkout@v2
-    - name: 'Use Node.js ${{matrix.node}}.x'
-      uses: actions/setup-node@master
+    - uses: actions/checkout@v3
+    - name: "Use Node.js ${{matrix.node}}.x"
+      uses: actions/setup-node@v3
       with:
         node-version: ${{matrix.node}}.x
-    - run: yarn install --immutable
-    - run: yarn build # We need the stubs to run the tests
-    - run: yarn eslint
-    - run: yarn jest
+    - run: corepack yarn install --immutable
+    - run: corepack yarn build # We need the stubs to run the tests
+    - run: corepack yarn eslint
+    - run: corepack yarn jest


@@ -3,21 +3,21 @@ name: Version Sync
 on:
   workflow_dispatch:
   schedule:
-  - cron: '0 0 * * *'
+  - cron: "0 0 * * *"

 jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Install Node
-      uses: actions/setup-node@v1
+      uses: actions/setup-node@v3
       with:
         node-version: 14.x
-    - name: 'Update the package manager versions'
+    - name: "Update the package manager versions"
       run: |
         LATEST_NPM=$(curl https://registry.npmjs.org/npm | jq '.["dist-tags"].latest')
         LATEST_PNPM=$(curl https://registry.npmjs.org/pnpm | jq '.["dist-tags"].latest')
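
Note: the curl | jq pipeline above reads the "latest" dist-tag from each package's npm registry document. As a minimal illustration (not part of this commit), the same lookup can be written in JavaScript; the file name latest-version.mjs and the helper latestVersion are hypothetical, and the global fetch assumes Node.js 18+:

// latest-version.mjs (illustrative sketch only)
// Returns the same field the workflow extracts with jq '.["dist-tags"].latest'.
const latestVersion = async (name) => {
  const res = await fetch(`https://registry.npmjs.org/${name}`);
  const metadata = await res.json();
  return metadata['dist-tags'].latest;
};

console.log(await latestVersion('npm'), await latestVersion('pnpm'));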

.pnp.cjs (generated, 29076 lines)

File diff suppressed because one or more lines are too long

.pnp.loader.mjs (generated, new file, 266 lines)

@@ -0,0 +1,266 @@
import { URL, fileURLToPath, pathToFileURL } from 'url';
import fs from 'fs';
import path from 'path';
import moduleExports, { Module } from 'module';
var PathType;
(function(PathType2) {
PathType2[PathType2["File"] = 0] = "File";
PathType2[PathType2["Portable"] = 1] = "Portable";
PathType2[PathType2["Native"] = 2] = "Native";
})(PathType || (PathType = {}));
const npath = Object.create(path);
const ppath = Object.create(path.posix);
npath.cwd = () => process.cwd();
ppath.cwd = () => toPortablePath(process.cwd());
ppath.resolve = (...segments) => {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
return path.posix.resolve(...segments);
} else {
return path.posix.resolve(ppath.cwd(), ...segments);
}
};
const contains = function(pathUtils, from, to) {
from = pathUtils.normalize(from);
to = pathUtils.normalize(to);
if (from === to)
return `.`;
if (!from.endsWith(pathUtils.sep))
from = from + pathUtils.sep;
if (to.startsWith(from)) {
return to.slice(from.length);
} else {
return null;
}
};
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
npath.contains = (from, to) => contains(npath, from, to);
ppath.contains = (from, to) => contains(ppath, from, to);
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/;
function fromPortablePath(p) {
if (process.platform !== `win32`)
return p;
let portablePathMatch, uncPortablePathMatch;
if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP))
p = portablePathMatch[1];
else if (uncPortablePathMatch = p.match(UNC_PORTABLE_PATH_REGEXP))
p = `\\\\${uncPortablePathMatch[1] ? `.\\` : ``}${uncPortablePathMatch[2]}`;
else
return p;
return p.replace(/\//g, `\\`);
}
function toPortablePath(p) {
if (process.platform !== `win32`)
return p;
p = p.replace(/\\/g, `/`);
let windowsPathMatch, uncWindowsPathMatch;
if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP))
p = `/${windowsPathMatch[1]}`;
else if (uncWindowsPathMatch = p.match(UNC_WINDOWS_PATH_REGEXP))
p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`;
return p;
}
const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding(`natives`)));
const isBuiltinModule = (request) => request.startsWith(`node:`) || builtinModules.has(request);
function readPackageScope(checkPath) {
const rootSeparatorIndex = checkPath.indexOf(npath.sep);
let separatorIndex;
do {
separatorIndex = checkPath.lastIndexOf(npath.sep);
checkPath = checkPath.slice(0, separatorIndex);
if (checkPath.endsWith(`${npath.sep}node_modules`))
return false;
const pjson = readPackage(checkPath + npath.sep);
if (pjson) {
return {
data: pjson,
path: checkPath
};
}
} while (separatorIndex > rootSeparatorIndex);
return false;
}
function readPackage(requestPath) {
const jsonPath = npath.resolve(requestPath, `package.json`);
if (!fs.existsSync(jsonPath))
return null;
return JSON.parse(fs.readFileSync(jsonPath, `utf8`));
}
async function tryReadFile(path2) {
try {
return await fs.promises.readFile(path2, `utf8`);
} catch (error) {
if (error.code === `ENOENT`)
return null;
throw error;
}
}
function tryParseURL(str, base) {
try {
return new URL(str, base);
} catch {
return null;
}
}
function getFileFormat(filepath) {
var _a, _b;
const ext = path.extname(filepath);
switch (ext) {
case `.mjs`: {
return `module`;
}
case `.cjs`: {
return `commonjs`;
}
case `.wasm`: {
throw new Error(`Unknown file extension ".wasm" for ${filepath}`);
}
case `.json`: {
throw new Error(`Unknown file extension ".json" for ${filepath}`);
}
case `.js`: {
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
return (_a = pkg.data.type) != null ? _a : `commonjs`;
}
default: {
const isMain = process.argv[1] === filepath;
if (!isMain)
return null;
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
if (pkg.data.type === `module`)
return null;
return (_b = pkg.data.type) != null ? _b : `commonjs`;
}
}
}
async function getFormat$1(resolved, context, defaultGetFormat) {
const url = tryParseURL(resolved);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetFormat(resolved, context, defaultGetFormat);
const format = getFileFormat(fileURLToPath(url));
if (format) {
return {
format
};
}
return defaultGetFormat(resolved, context, defaultGetFormat);
}
async function getSource$1(urlString, context, defaultGetSource) {
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetSource(urlString, context, defaultGetSource);
return {
source: await fs.promises.readFile(fileURLToPath(url), `utf8`)
};
}
async function load$1(urlString, context, defaultLoad) {
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultLoad(urlString, context, defaultLoad);
const filePath = fileURLToPath(url);
const format = getFileFormat(filePath);
if (!format)
return defaultLoad(urlString, context, defaultLoad);
return {
format,
source: await fs.promises.readFile(filePath, `utf8`)
};
}
const pathRegExp = /^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:node:)?(?:@[^/]+\/)?[^/]+)\/*(.*|)$/;
const isRelativeRegexp = /^\.{0,2}\//;
async function resolve$1(originalSpecifier, context, defaultResolver) {
var _a;
const {findPnpApi} = moduleExports;
if (!findPnpApi || isBuiltinModule(originalSpecifier))
return defaultResolver(originalSpecifier, context, defaultResolver);
let specifier = originalSpecifier;
const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0);
if (url) {
if (url.protocol !== `file:`)
return defaultResolver(originalSpecifier, context, defaultResolver);
specifier = fileURLToPath(url);
}
const {parentURL, conditions = []} = context;
const issuer = parentURL ? fileURLToPath(parentURL) : process.cwd();
const pnpapi = (_a = findPnpApi(issuer)) != null ? _a : url ? findPnpApi(specifier) : null;
if (!pnpapi)
return defaultResolver(originalSpecifier, context, defaultResolver);
const dependencyNameMatch = specifier.match(pathRegExp);
let allowLegacyResolve = false;
if (dependencyNameMatch) {
const [, dependencyName, subPath] = dependencyNameMatch;
if (subPath === ``) {
const resolved = pnpapi.resolveToUnqualified(`${dependencyName}/package.json`, issuer);
if (resolved) {
const content = await tryReadFile(resolved);
if (content) {
const pkg = JSON.parse(content);
allowLegacyResolve = pkg.exports == null;
}
}
}
}
const result = pnpapi.resolveRequest(specifier, issuer, {
conditions: new Set(conditions),
extensions: allowLegacyResolve ? void 0 : []
});
if (!result)
throw new Error(`Resolving '${specifier}' from '${issuer}' failed`);
const resultURL = pathToFileURL(result);
if (url) {
resultURL.search = url.search;
resultURL.hash = url.hash;
}
return {
url: resultURL.href
};
}
const binding = process.binding(`fs`);
const originalfstat = binding.fstat;
const ZIP_FD = 2147483648;
binding.fstat = function(...args) {
const [fd, useBigint, req] = args;
if ((fd & ZIP_FD) !== 0 && useBigint === false && req === void 0) {
try {
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
}
return originalfstat.apply(this, args);
};
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10));
const hasConsolidatedHooks = major > 16 || major === 16 && minor >= 12;
const resolve = resolve$1;
const getFormat = hasConsolidatedHooks ? void 0 : getFormat$1;
const getSource = hasConsolidatedHooks ? void 0 : getSource$1;
const load = hasConsolidatedHooks ? load$1 : void 0;
export { getFormat, getSource, load, resolve };
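
The generated loader above wires into Node's ESM loader hooks: resolve routes module resolution through the PnP API, while getFormat/getSource (Node < 16.12) or the consolidated load hook (Node >= 16.12) supply the source and format for file: URLs. A minimal sketch with the same hook shape is shown below; the file name minimal-loader.mjs is hypothetical and these hooks simply defer to Node's defaults instead of consulting PnP:

// minimal-loader.mjs (illustrative sketch, not the generated loader)
// resolve(specifier, context, nextResolve): the real loader calls
// pnpapi.resolveRequest(); this sketch falls through to the default resolver.
export async function resolve(specifier, context, nextResolve) {
  return nextResolve(specifier, context, nextResolve);
}

// load(url, context, nextLoad): the real loader reads the file itself and
// reports its format; this sketch defers to Node's default loading pipeline.
export async function load(url, context, nextLoad) {
  return nextLoad(url, context, nextLoad);
}

Such a loader would be registered with, for example, node --experimental-loader ./minimal-loader.mjs script.mjs.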

Binary files not shown (14 files).

Some files were not shown because too many files have changed in this diff.