mirror of https://github.com/nodejs/corepack.git
Refactors pmm -> corepack
This commit is contained in:
parent 8c494c6cae
commit 9809b8f856
@@ -0,0 +1,5 @@
module.exports = {
  extends: [
    `@yarnpkg`,
  ],
};
@@ -21,20 +21,20 @@ jobs:
         with:
           node-version: 14.x

-      - name: 'Build Node with pmm master'
+      - name: 'Build Node with corepack master'
         run: |
           yarn pack
           git clone -b mael/pmm --depth=1 https://github.com/arcanis/node.git node && cd node
           git config user.name 'John Doe'
           git config user.email 'john@example.org'
-          rm -rf deps/pmm && tar xvf ../package.tgz && mv package deps/pmm
-          git add . && git commit -m 'Updates pmm'
+          rm -rf deps/corepack && tar xvf ../package.tgz && mv package deps/corepack
+          git add . && git commit -m 'Updates corepack'
           ./configure
           DISTTYPE=nightly DATESTRING=YYYY-MM-DD COMMIT=XXXX make binary -j8
-          mv node-v15.0.0-nightlyYYYY-MM-DDXXXX-*.tar.gz node-pmm-${{matrix.platform[0]}}-x64.tar.gz
+          mv node-v15.0.0-nightlyYYYY-MM-DDXXXX-*.tar.gz node-corepack-${{matrix.platform[0]}}-x64.tar.gz

       - name: Upload build artifacts
         uses: actions/upload-artifact@v2
         with:
-          name: node-pmm-${{matrix.platform[0]}}-x64
-          path: node/node-pmm-${{matrix.platform[0]}}-x64.tar.gz
+          name: node-corepack-${{matrix.platform[0]}}-x64
+          path: node/node-corepack-${{matrix.platform[0]}}-x64.tar.gz
@@ -0,0 +1,6 @@
{
  "recommendations": [
    "arcanis.vscode-zipfs",
    "dbaeumer.vscode-eslint"
  ]
}

@@ -1,15 +0,0 @@
#!/usr/bin/env node

const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.js";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

// Setup the environment to be able to require typescript/bin/tsc
require(absPnpApiPath).setup();

// Defer to the real typescript/bin/tsc your application uses
module.exports = absRequire(`typescript/bin/tsc`);

@@ -1,15 +0,0 @@
#!/usr/bin/env node

const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.js";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

// Setup the environment to be able to require typescript/bin/tsserver
require(absPnpApiPath).setup();

// Defer to the real typescript/bin/tsserver your application uses
module.exports = absRequire(`typescript/bin/tsserver`);

@@ -1,15 +0,0 @@
#!/usr/bin/env node

const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.js";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

// Setup the environment to be able to require typescript/lib/tsc.js
require(absPnpApiPath).setup();

// Defer to the real typescript/lib/tsc.js your application uses
module.exports = absRequire(`typescript/lib/tsc.js`);

@@ -1,15 +0,0 @@
#!/usr/bin/env node

const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.js";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

// Setup the environment to be able to require typescript/lib/tsserver.js
require(absPnpApiPath).setup();

// Defer to the real typescript/lib/tsserver.js your application uses
module.exports = absRequire(`typescript/lib/tsserver.js`);

@@ -1,15 +0,0 @@
#!/usr/bin/env node

const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.js";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

// Setup the environment to be able to require typescript/lib/typescript.js
require(absPnpApiPath).setup();

// Defer to the real typescript/lib/typescript.js your application uses
module.exports = absRequire(`typescript/lib/typescript.js`);

@@ -1,6 +0,0 @@
{
  "name": "typescript",
  "version": "3.8.3-pnpify",
  "main": "./lib/typescript.js",
  "type": "commonjs"
}
@@ -1,3 +1,9 @@
 {
-  "typescript.tsdk": ".vscode/pnpify/typescript/lib"
+  "typescript.tsdk": "/Users/mael.nison/pmpm/.yarn/sdks/typescript/lib",
+  "search.exclude": {
+    "**/.yarn": true,
+    "**/.pnp.*": true
+  },
+  "eslint.nodePath": ".yarn/sdks",
+  "typescript.enablePromptUseWorkspaceTsdk": true
 }
[Binary changes to vendored `.yarn/cache/*.zip` archives are not shown. The archives named in this commit are:
.yarn/cache/@typescript-eslint-eslint-plugin-npm-2.34.0-f789694d5f-8d800f4726.zip
.yarn/cache/@typescript-eslint-experimental-utils-npm-2.34.0-c95189aa27-53cbbcfe67.zip
.yarn/cache/@typescript-eslint-scope-manager-npm-4.2.0-0a48527c8c-3272b80975.zip
.yarn/cache/@typescript-eslint-typescript-estree-npm-2.34.0-c6a349caae-77d1a758df.zip
.yarn/cache/@typescript-eslint-typescript-estree-npm-4.2.0-31b66c0518-5186565a8e.zip
.yarn/cache/@typescript-eslint-visitor-keys-npm-4.2.0-1439678e23-a8cfced7b0.zip
.yarn/cache/json-stable-stringify-without-jsonify-npm-1.0.1-b65772b28b-a01b6c6541.zip]
@@ -0,0 +1,77 @@
## What problem does it solve?

Various problems arise from npm being the only package manager shipped by default:

- Projects using popular package management solutions other than npm (particularly Yarn and pnpm) require an additional installation step that must often be repeated when switching between Node versions. This leads to a significant part of the Node userbase effectively being second-class citizens, which sounds unfortunate.

- Because one package manager currently receives special treatment, users are more likely to pick it even if they would have chosen another solution given the choice (it really depends on how they balance the tradeoffs, but sometimes they value simplicity over purely technical factors). This artificial barrier hurts our community by making it harder to pick the right tool for the job.

- Having a single official package manager means that all the keys belong to a single player which can do whatever it pleases with them (even the Node project only has a limited influence over it, since removing the unique package manager would be poorly accepted by the community). Spreading these responsibilities over multiple projects gives less power to each, ensuring that everyone behaves well.

Discussion thread: https://github.com/nodejs/node/issues/15244

## Envisioned workflow

1. Users would install Node as usual.

2. Node would be distributed slightly differently:

    - Pmm would be included with Node out of the box.

    - The full npm package wouldn't be included out of the box anymore (this might be an incremental move, with first a major version shipping pmm + npm, and the next one discarding npm).

    - **However**, the Node distribution would include jump binaries for all three main package managers (`yarn`, `npm`, and `pnpm`) that would simply delegate to `pmm <package manager name>`. Pmm would then handle the install process by following the rules described in later sections.

    - Pmm could potentially be distributed as a Node subcommand rather than a standalone binary. In this case, commands in this document (such as `pmm install <name@version>`) would be replaced by `node --pmm install <name@version>` (or any other variant).

3. Regular users would keep using the `yarn` / `npm` / `pnpm` global binaries just like they are used to. The one difference is that the package manager implementations would be lazily downloaded, without having to be manually installed (because the global jumpers would be included in the Node distribution, cf the previous point).

    - Projects that don't list the `engines.pm` field would allow any package manager, and Pmm would install them based on predefined versions. Those versions would be frozen in time within Pmm itself to "known good values". For example, the default npm version could be 6.14.5, and the default Yarn one 1.22.4. Users who want to upgrade to higher versions would just have to update the `engines.pm` field (cf the next section).

4. Project authors would most of the time only have to care about the binaries as well, but they would be able to upgrade package manager versions simply by changing the versions set in the `engines.pm` field.

    - Pmm could reasonably provide some kind of basic CLI interface to select a version to upgrade to in a few keystrokes (similar to what `emsdk` does for the [emscripten toolchain](https://github.com/emscripten-core/emsdk#how-do-i-check-for-updates-to-the-emscripten-sdk), or what [nvm](https://github.com/nvm-sh/nvm) does for Node releases).

5. Docker users would follow a similar workflow to other users; the default image would run network queries to install the right package manager for the project being installed.

    - However, users with strong offline requirements would be able to run the `pmm install <name@version>` command when preparing their images. It would ensure that the requested package manager is made available for later use.

    - Network access could be disabled entirely by setting `PMM_ENABLE_NETWORK=0` in the environment - Pmm would then only use the package managers that got installed by prior `pmm install` calls (see the sketch after this list).

6. Package manager maintainers would submit a PR to the Node repository each time they wish for a new version to be made available through Pmm (this can be easily automated using a GitHub Action on each of our repositories). Merging the PR would instantly make the new version available to Node users (once they upgrade).
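The network switch mentioned in the last bullet of point 5 boils down to a guard in front of every outgoing request. A minimal sketch of such a guard, mirroring the `httpUtils` helper added later in this commit (the design text calls the variable `PMM_ENABLE_NETWORK`; the shipped code renames it to `COREPACK_ENABLE_NETWORK`, and it throws clipanion's `UsageError` rather than a plain `Error`):

```ts
import {IncomingMessage} from 'http';
import https, {RequestOptions} from 'https';

// Every outgoing request goes through this helper; setting the environment
// variable to `0` turns the tool into a purely offline one, so only package
// managers installed by a prior `install`/`prepare` call can be used.
export function fetchUrlStream(url: string, options: RequestOptions = {}) {
  if (process.env.COREPACK_ENABLE_NETWORK === `0`)
    throw new Error(`Network access disabled by the environment; can't reach ${url}`);

  return new Promise<IncomingMessage>((resolve, reject) => {
    const request = https.get(url, options, response => {
      const statusCode = response.statusCode ?? 500;
      if (statusCode < 200 || statusCode >= 300)
        return reject(new Error(`Server answered with HTTP ${statusCode}`));

      return resolve(response);
    });

    request.on(`error`, err => {
      reject(err);
    });
  });
}
```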
## How does it work?

When any of the embedded binaries are called (whether it's `yarn`, `npm`, or `pnpm`), the tool will find the closest ancestor `package.json` for the current directory. It will then extract the `engines.pm` key, configured as such:

```json
{
  "engines": {
    "pm": "yarn@^2.0.0"
  }
}
```

The tool will then check whether it got called via the right binary endpoint (`npm` or `npx` when the package manager is configured for npm, `yarn` when configured for Yarn, etc), and will report an error otherwise. This ensures that we can't accidentally call, say, pnpm on an npm project (which would otherwise lead to diverging environments, since the lockfiles and features wouldn't be the same depending on the interpreting package manager).

If the check succeeds, the tool will check whether a compatible package manager has already been installed (they're all stored on disk in the local user's home folder). If not, it will install the latest matching release (based on the information dynamically retrieved from [`versions.json`](/versions.json)). Once it has ensured that a version exists, it'll forward the call to it.
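A condensed sketch of that lookup, with hypothetical helper names (`findClosestPackageJson`, `readSpec`, `checkBinaryEndpoint`) standing in for the real `specUtils`/`Engine` internals. Note that the code shipped in this commit reads a top-level `packageManager` field rather than `engines.pm`, and the `npx`→`npm` mapping and the install/forward steps are omitted here:

```ts
import {existsSync, readFileSync} from 'fs';
import {dirname, join} from 'path';

// Walk up from `cwd` until a package.json is found (or the filesystem root is reached).
function findClosestPackageJson(cwd: string): string | null {
  let current = cwd;
  while (true) {
    const candidate = join(current, `package.json`);
    if (existsSync(candidate))
      return candidate;
    const parent = dirname(current);
    if (parent === current)
      return null;
    current = parent;
  }
}

// Extract the expected package manager ("yarn@^2.0.0") from the manifest.
// This document describes an `engines.pm` field; the shipped code reads a
// top-level `packageManager` field instead.
function readSpec(manifestPath: string): {name: string, range: string} | null {
  const manifest = JSON.parse(readFileSync(manifestPath, `utf8`));
  const raw = manifest.engines?.pm;
  if (typeof raw !== `string`)
    return null;
  const [name, range] = raw.split(`@`);
  return {name, range};
}

// Called by the `yarn` / `npm` / `pnpm` shims with the name of the binary the
// user actually typed; errors out when the project expects another manager.
export function checkBinaryEndpoint(calledAs: string, cwd: string) {
  const manifestPath = findClosestPackageJson(cwd);
  if (manifestPath === null)
    return;
  const spec = readSpec(manifestPath);
  if (spec !== null && spec.name !== calledAs)
    throw new Error(`This project is configured for ${spec.name}@${spec.range}; run it through ${spec.name} instead of ${calledAs}`);
}
```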
## Frequently asked questions

**Why not just ask the user which package manager they want to use when installing Node?**

Whether to use npm, Yarn, or pnpm isn't up to the user but to each individual project. Different projects leverage different features from different package managers. For example, one project might rely on Yarn workspaces, whereas another has set up its repository with pnpm in mind.

**How would things work with global packages?**

Nothing would change in the context of this particular proposal. Npm would keep installing its globals alongside Node, and Yarn would keep installing them into the user's home directory.

**Why not just keep only npm?**

While npm is favored by the majority of the ecosystem, a significant portion decided to use different tools. Their use cases deserve to be heard rather than be discarded simply because a slightly higher percentage of users happens not to directly benefit from them. Additionally, keeping powers balanced is important - even more so given that npm is a corporate entity with little oversight.

From the npm perspective, a project such as Pmm would also have its benefits: projects regularly break when upgrading from one Node version to another because npm gets upgraded as well. By pinning the package manager version, they would ensure that their users only upgrade when they are ready to, decreasing accidental frustration.

## Known issues

- The `pnpx` and `npx` binaries can only be called from within pnpm and npm projects, respectively. This is because we otherwise cannot infer the package manager version from the local manifest, as it would list another package manager instead. Fixing that is possible if we include "global installs" features inside pmm (so that we would fall back to the global `npx` in those circumstances). It seemed out of scope for the initial prototype, but we certainly can discuss it in an issue.
README.md (171 changed lines)
@@ -1,16 +1,42 @@
-# <img src="./icon.svg" height="25" /> pmm
+# <img src="./icon.svg" height="25" /> corepack

-> A package manager manager - "This seal manages to keep the ball in balance on its nose."
+Corepack is a zero-runtime-dependency Node script that acts as a bridge between Node projects and the package managers they are intended to be used with during development.

-*Note: the `pmm` name (and other names) are temporary placeholders. This document mostly aims to address the design of the feature.*
+**Important:** At the moment, Corepack only covers Yarn and pnpm. Given that we have little control on the npm project, we prefer to focus on the Yarn and pnpm use cases. As a result, Corepack doesn't have any effect at all on the way you use npm.

-## Usage
+## How to Install

-Any of the following will work:
+### Default Installs

-### Prebuilt node
+Corepack isn't intended to be installed manually. While it's certainly possible, we're working with the Node TSC to provide Corepack by default starting from Node 15, thus ensuring that all package managers can be used with little to no friction.

-We have a few prebuilt Node binaries (based on the [following branch](https://github.com/arcanis/node/tree/mael/pmm)) that you can just download, unpack somewhere, and add to your `PATH` environment variable. It's likely the easiest way to get started!
+### Manual Installs
+
+<details>
+<summary>Click here to see how to install Corepack using npm</summary>
+
+First uninstall your global Yarn and pnpm binaries (just leave npm). In general, you'd do this by running the following command:
+
+```
+npm uninstall -g yarn pnpm
+```
+
+Then install Corepack:
+
+```
+npm install -g corepack
+```
+
+We do acknowledge the irony of using npm to install Corepack, which is why the preferred option is to use the Corepack version that will be distributed along with Node itself.
+
+</details>
+
+### Prebuilt Binaries
+
+<details>
+<summary>Click here to see how to download prebuilt Corepack Node distributions</summary>
+
+We have a few prebuilt Node binaries (based on the [following branch](https://github.com/arcanis/node/tree/mael/pmm)) that you can just download, unpack somewhere, and add to your `PATH` environment variable.
+
 1. Go to [this page](https://github.com/arcanis/pmm/actions?query=workflow%3ABuild)
 2. Open the latest build (the one at the top)
@@ -18,20 +44,57 @@ We have a few prebuilt Node binaries (based on the [following branch](https://gi
 4. Unzip the artifact, then untar it
 5. Add the `node-v15.0.0-nightlyYYYY-MM-DDXXXX-linux-x64/bin` directory to your `$PATH`

-### Docker
+</details>

-1. `docker build -t pmm https://github.com/arcanis/pmm.git\#master:docker`
-2. `docker run -it pmm /bin/sh`
+## Usage

-3. Do whatever you want! The `node` / `npm` / `yarn` / `pnpm` binaries are there, via pmm. To take a look at the different workflows, here are some things you can try:
+Just use your package managers as you usually would. Run `yarn install` in Yarn projects, `pnpm install` in pnpm projects, and `npm` in npm projects. Corepack will catch these calls, and depending on the situation:

-- Create a new folder, then run `yarn init`.
-- Then try to run `npm install` in this same folder.
-- Then open the package.json, and change the `packageManager` field to `1.10.0`. Try running `yarn --version`. See how fast it is?
+- **If the local project is configured for the package manager you're using**, Corepack will silently download and cache the latest compatible version.

-### Manual build
+- **If the local project is configured for a different package manager**, Corepack will request you to run the command again using the right package manager - thus avoiding corruptions of your install artifacts.

-If you want to do things yourself, you can build the project like this:
+- **If the local project isn't configured for any package manager**, Corepack will assume that you know what you're doing, and will use whatever package manager version has been pinned as "known good release". Check the relevant section for more details.
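Under the hood, the `yarn` / `pnpm` binaries of a Corepack-enabled distribution are tiny generated shims - `mkshims.ts` further down in this diff writes entry points essentially equivalent to this sketch (file layout simplified, shown here for the `yarn` shim):

```ts
#!/usr/bin/env node
// Generated shim for the `yarn` binary: every call is routed through Corepack,
// which resolves and runs the package manager version the local project declares.
// The arguments are [packageManager, binaryName, ...userArgs], matching the
// string emitted by mkshims.ts; `./corepack` is the bundled entry point.
require(`./corepack`).runMain([`yarn`, `yarn`, ...process.argv.slice(2)]);
```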
## Known Good Releases

When running Yarn or pnpm within projects that don't list a supported package manager, Corepack will default to a set of Known Good Releases. In a way, you can compare this to Node, where each version ships with a specific version of npm.

The Known Good Releases can be updated system-wide using the `--activate` flag from the `corepack prepare` and `corepack hydrate` commands.
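The Engine changes later in this diff implement this through a small `lastKnownGood.json` file stored in the Corepack install folder. A simplified sketch of the lookup (folder resolution and error handling trimmed; the real code goes through `folderUtils.getInstallFolder()`):

```ts
import fs from 'fs';
import {homedir} from 'os';
import path from 'path';

// Per-user state folder; the shipped folderUtils derives this from
// COREPACK_HOME, falling back to ~/.node/corepack.
const installFolder = process.env.COREPACK_HOME ?? path.join(homedir(), `.node/corepack`);

// Resolve the version to use when the project doesn't pin one: prefer the
// "Last Known Good" entry recorded by `--activate`, else the built-in default.
export async function getDefaultVersion(packageManager: string, builtinDefault: string) {
  let lastKnownGood: unknown;
  try {
    const file = path.join(installFolder, `lastKnownGood.json`);
    lastKnownGood = JSON.parse(await fs.promises.readFile(file, `utf8`));
  } catch {
    // No `--activate` ever ran; fall back to the default baked into Corepack.
  }

  if (typeof lastKnownGood !== `object` || lastKnownGood === null)
    return builtinDefault;

  const override = (lastKnownGood as Record<string, unknown>)[packageManager];
  return typeof override === `string` ? override : builtinDefault;
}
```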
## Offline Workflow

Corepack supports fully offline setups through the utility commands detailed in the next section:

- Either you can use the network while building your container image, in which case you'll simply run `corepack prepare --cache-only <name>` to make sure that your image includes the Last Known Good release for the specified package manager.

- If you want to have *all* Last Known Good releases for all package managers, just use the `--all` flag, which will do just that.

- Or you're publishing your project to a system where the network is unavailable, in which case you'll preemptively generate a package manager archive from your local computer (using `corepack prepare`) before storing it somewhere your container will be able to access (for example within your repository). After that, it's just a matter of running `corepack hydrate <path/to/corepack>` to set up the cache.

## Utility Commands

### `corepack prepare [name@version]`

| Option | Description |
| --- | --- |
| `--all` | Prepare the "Last Known Good" version of all supported package managers |
| `--cache-only` | Just populate the cache, don't generate an archive |
| `--activate` | Also update the "Last Known Good" release |

This command will download the given package manager (or the one configured for the local project if no argument is passed) and store it within the Corepack cache. Unless the `--cache-only` flag is set, an archive will also be generated that can be used by the `corepack hydrate` command.

### `corepack hydrate <path/to/corepack.tgz>`

| Option | Description |
| --- | --- |
| `--activate` | Also update the "Last Known Good" release |

This command will retrieve the given package manager from the specified archive and install it within the Corepack cache, ready to be used without further network interaction.
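Both commands ultimately just tar and untar a subdirectory of the Corepack install folder, as the `Prepare` and `Hydrate` command classes later in this diff show. Roughly (helper names and validation are simplified here):

```ts
import path from 'path';
import tar from 'tar';

// `corepack prepare yarn@2.2.2` - after the requested version has been
// installed under <installFolder>/<name>/<reference>, pack that directory
// into an archive next to the current working directory.
export async function packPackageManager(installFolder: string, installedPath: string, cwd: string, name: string, reference: string) {
  const fileName = path.join(cwd, `corepack-${name}-${reference}.tgz`);
  await tar.c({gzip: true, cwd: installFolder, file: fileName}, [path.relative(installFolder, installedPath)]);
  return fileName;
}

// `corepack hydrate <archive>` - unpack the archive back into the install
// folder so the package manager can be used without any network access.
export async function hydratePackageManager(installFolder: string, archivePath: string) {
  await tar.x({file: archivePath, cwd: installFolder});
}
```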
## Contributing

If you want to build Corepack yourself, you can do so like this:

1. Clone this repository
2. Run `yarn build` (no need for `yarn install`)
@@ -40,81 +103,9 @@ If you want to do things yourself, you can build the project like this:

You can also run the tests with `yarn jest` (still no install needed).

-## What problem does it solve?
+## Design
[Removed from the README in this commit: the former "What problem does it solve?", "Envisioned workflow", "How does it work?", "Frequently asked questions", and "Known issues" sections. Their text matches the DESIGN.md content added earlier in this commit, except that the README copy referenced the `packageManager` field where DESIGN.md says `engines.pm`.]
Various tidbits about Corepack's design are explained in more detail in [DESIGN.md](/DESIGN.md).

## License (MIT)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
module.exports = {
|
||||
presets: [
|
||||
`@babel/preset-typescript`,
|
||||
],
|
||||
plugins: [
|
||||
[`@babel/plugin-proposal-decorators`, {legacy: true}],
|
||||
[`@babel/plugin-proposal-class-properties`, {loose: true}],
|
||||
[`@babel/plugin-transform-modules-commonjs`],
|
||||
],
|
||||
presets: [
|
||||
`@babel/preset-typescript`,
|
||||
],
|
||||
plugins: [
|
||||
[`@babel/plugin-proposal-decorators`, {legacy: true}],
|
||||
[`@babel/plugin-proposal-class-properties`, {loose: true}],
|
||||
[`@babel/plugin-transform-modules-commonjs`],
|
||||
],
|
||||
};
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ FROM alpine:3.10
 RUN apk add --no-cache tar xz libstdc++

-RUN mkdir -p /opt/node && mkdir -p /opt/pmm
+RUN mkdir -p /opt/node && mkdir -p /opt/corepack

 RUN wget https://unofficial-builds.nodejs.org/download/release/v14.2.0/node-v14.2.0-linux-x64-musl.tar.xz -O - | tar -xJ --strip-components=1 -C /opt/node && ls -l /opt/node

@@ -11,6 +11,6 @@ RUN rm -rf /opt/node/lib /opt/node/bin/npm /opt/node/bin/npx
 ENV PATH="/opt/node/bin:$PATH"
 RUN which node && node --version

-RUN wget https://github.com/arcanis/pmm/archive/master.tar.gz -O - | tar -xz --strip-components=1 -C /opt/pmm && cd /opt/pmm && node ./.yarn/releases/yarn-*.js build
+RUN wget https://github.com/arcanis/pmm/archive/master.tar.gz -O - | tar -xz --strip-components=1 -C /opt/corepack && cd /opt/corepack && node ./.yarn/releases/yarn-*.js build

-ENV PATH="/opt/pmm/shims:$PATH"
+ENV PATH="/opt/corepack/shims:$PATH"
|
|||
|
|
@ -4,9 +4,9 @@ import semver from 'semver';
|
|||
const lines = readFileSync(0, `utf8`).split(/\n/).filter(line => line);
|
||||
|
||||
lines.sort((a, b) => {
|
||||
return semver.compare(a, b);
|
||||
return semver.compare(a, b);
|
||||
});
|
||||
|
||||
for (const version of lines) {
|
||||
console.log(`"${version}": "${process.argv[2]}",`);
|
||||
}
|
||||
for (const version of lines)
|
||||
console.log(`"${version}": "${process.argv[2]}",`);
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,3 @@
 module.exports = {
-  testTimeout: 30000
+  testTimeout: 30000,
 };
|
|
|
|||
mkshims.ts (62 changed lines)
|
|
@ -1,43 +1,43 @@
|
|||
import cmdShim from '@zkochan/cmd-shim';
|
||||
import fs from 'fs';
|
||||
|
||||
import config from './config.json';
|
||||
import cmdShim from '@zkochan/cmd-shim';
|
||||
import fs from 'fs';
|
||||
import {SupportedPackageManagers} from 'sources/types';
|
||||
|
||||
import config from './config.json';
|
||||
|
||||
async function main() {
|
||||
for (const packageManager of Object.keys(config.definitions) as SupportedPackageManagers[]) {
|
||||
const binSet = new Set<string>();
|
||||
for (const packageManager of Object.keys(config.definitions) as Array<SupportedPackageManagers>) {
|
||||
const binSet = new Set<string>();
|
||||
|
||||
for (const spec of Object.values(config.definitions[packageManager].ranges)) {
|
||||
if (Array.isArray(spec.bin)) {
|
||||
for (const entry of spec.bin) {
|
||||
binSet.add(entry);
|
||||
}
|
||||
} else {
|
||||
for (const entry of Object.keys(spec.bin)) {
|
||||
binSet.add(entry);
|
||||
}
|
||||
}
|
||||
for (const spec of Object.values(config.definitions[packageManager].ranges)) {
|
||||
if (Array.isArray(spec.bin)) {
|
||||
for (const entry of spec.bin) {
|
||||
binSet.add(entry);
|
||||
}
|
||||
|
||||
for (const binaryName of binSet) {
|
||||
const entryPath = `${__dirname}/dist/${binaryName}.js`;
|
||||
const entryScript = [
|
||||
`#!/usr/bin/env node\n`,
|
||||
`require('./pmm').runMain(['${packageManager}', '${binaryName}', ...process.argv.slice(2)]);\n`,
|
||||
].join(``);
|
||||
|
||||
fs.writeFileSync(entryPath, entryScript);
|
||||
fs.chmodSync(entryPath, 0o755);
|
||||
|
||||
await cmdShim(entryPath, `${__dirname}/shims/${binaryName}`, {});
|
||||
} else {
|
||||
for (const entry of Object.keys(spec.bin)) {
|
||||
binSet.add(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`All shims have been generated.`);
|
||||
for (const binaryName of binSet) {
|
||||
const entryPath = `${__dirname}/dist/${binaryName}.js`;
|
||||
const entryScript = [
|
||||
`#!/usr/bin/env node\n`,
|
||||
`require('./corepack').runMain(['${packageManager}', '${binaryName}', ...process.argv.slice(2)]);\n`,
|
||||
].join(``);
|
||||
|
||||
fs.writeFileSync(entryPath, entryScript);
|
||||
fs.chmodSync(entryPath, 0o755);
|
||||
|
||||
await cmdShim(entryPath, `${__dirname}/shims/${binaryName}`, {});
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`All shims have been generated.`);
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.log(err.stack);
|
||||
process.exitCode = 1;
|
||||
console.log(err.stack);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
|
|
|
|||
package.json (13 changed lines)
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"name": "@arcanis/pmm",
|
||||
"name": "corepack",
|
||||
"version": "0.1.0",
|
||||
"bin": "./sources/main.ts",
|
||||
"packageManager": "yarn@^2.0.0-rc.29",
|
||||
|
|
@ -14,11 +14,16 @@
|
|||
"@types/node": "^13.9.2",
|
||||
"@types/semver": "^7.1.0",
|
||||
"@types/tar": "^4.0.3",
|
||||
"@typescript-eslint/eslint-plugin": "^2.0.0",
|
||||
"@typescript-eslint/parser": "^4.2.0",
|
||||
"@yarnpkg/eslint-config": "^0.1.0",
|
||||
"@yarnpkg/fslib": "^2.1.0",
|
||||
"@zkochan/cmd-shim": "^5.0.0",
|
||||
"clipanion": "^2.4.4",
|
||||
"debug": "^4.1.1",
|
||||
"enquirer": "^2.3.6",
|
||||
"eslint": "^7.10.0",
|
||||
"eslint-plugin-arca": "^0.9.5",
|
||||
"jest": "^25.1.0",
|
||||
"semver": "^7.1.3",
|
||||
"supports-color": "^7.1.0",
|
||||
|
|
@ -32,7 +37,7 @@
|
|||
},
|
||||
"scripts": {
|
||||
"build": "rm -rf dist && yarn webpack && yarn ts-node ./mkshims.ts",
|
||||
"pmm": "ts-node ./sources/main.ts",
|
||||
"corepack": "ts-node ./sources/main.ts",
|
||||
"prepack": "yarn build",
|
||||
"postpack": "rm -rf dist shims"
|
||||
},
|
||||
|
|
@ -41,7 +46,7 @@
|
|||
"shims"
|
||||
],
|
||||
"publishConfig": {
|
||||
"bin": "./dist/pmm.js",
|
||||
"bin": "./dist/corepack.js",
|
||||
"executableFiles": [
|
||||
"./dist/npm.js",
|
||||
"./dist/npx.js",
|
||||
|
|
@ -49,7 +54,7 @@
|
|||
"./dist/pnpx.js",
|
||||
"./dist/yarn.js",
|
||||
"./dist/yarnpkg.js",
|
||||
"./dist/pmm.js",
|
||||
"./dist/corepack.js",
|
||||
"./shims/npm",
|
||||
"./shims/npm.ps1",
|
||||
"./shims/npx",
|
||||
|
|
|
|||
|
|
@ -1,72 +1,123 @@
|
|||
import {UsageError, Definition} from 'clipanion';
|
||||
import semver from 'semver';
|
||||
import {UsageError} from 'clipanion';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
|
||||
import defaultConfig from '../config.json';
|
||||
import defaultConfig from '../config.json';
|
||||
|
||||
import * as folderUtils from './folderUtils';
|
||||
import * as pmmUtils from './pmmUtils';
|
||||
import {Config, Descriptor, Locator, SupportedPackageManagers} from './types';
|
||||
import * as folderUtils from './folderUtils';
|
||||
import * as pmmUtils from './pmmUtils';
|
||||
import {Config, Descriptor, Locator, SupportedPackageManagers, SupportedPackageManagerSet} from './types';
|
||||
|
||||
|
||||
export class Engine {
|
||||
constructor(private config: Config = defaultConfig as Config) {
|
||||
constructor(private config: Config = defaultConfig as Config) {
|
||||
}
|
||||
|
||||
async getDefaultDescriptors() {
|
||||
const locators: Array<Descriptor> = [];
|
||||
|
||||
for (const name of SupportedPackageManagerSet as Set<SupportedPackageManagers>)
|
||||
locators.push({name, range: await this.getDefaultVersion(name)});
|
||||
|
||||
return locators;
|
||||
}
|
||||
|
||||
async getDefaultVersion(packageManager: SupportedPackageManagers) {
|
||||
const definition = this.config.definitions[packageManager];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${packageManager}) isn't supported by this corepack build`);
|
||||
|
||||
let lastKnownGood: unknown;
|
||||
try {
|
||||
lastKnownGood = JSON.parse(await fs.promises.readFile(this.getLastKnownGoodFile(), `utf8`));
|
||||
} catch {
|
||||
// Ignore errors; too bad
|
||||
}
|
||||
|
||||
getDefaultVersion(packageManager: SupportedPackageManagers) {
|
||||
const definition = this.config.definitions[packageManager];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${packageManager}) isn't supported by this pmm build`);
|
||||
if (typeof lastKnownGood !== `object` || lastKnownGood === null)
|
||||
return definition.default;
|
||||
|
||||
return definition.default;
|
||||
if (!Object.prototype.hasOwnProperty.call(lastKnownGood, packageManager))
|
||||
return definition.default;
|
||||
|
||||
const override = (lastKnownGood as any)[packageManager];
|
||||
if (typeof override !== `string`)
|
||||
return definition.default;
|
||||
|
||||
return override;
|
||||
}
|
||||
|
||||
async activatePackageManager(locator: Locator) {
|
||||
const lastKnownGoodFile = this.getLastKnownGoodFile();
|
||||
|
||||
let lastKnownGood;
|
||||
try {
|
||||
lastKnownGood = JSON.parse(await fs.promises.readFile(lastKnownGoodFile, `utf8`));
|
||||
} catch {
|
||||
// Ignore errors; too bad
|
||||
}
|
||||
|
||||
async ensurePackageManager(locator: Locator) {
|
||||
const definition = this.config.definitions[locator.name];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${locator.name}) isn't supported by this pmm build`);
|
||||
if (typeof lastKnownGood !== `object` || lastKnownGood === null)
|
||||
lastKnownGood = {};
|
||||
|
||||
const ranges = Object.keys(definition.ranges).reverse();
|
||||
const range = ranges.find(range => semver.satisfies(locator.reference, range));
|
||||
if (typeof range === `undefined`)
|
||||
throw new Error(`Assertion failed: Specified resolution (${locator.reference}) isn't supported by any of ${ranges.join(`, `)}`);
|
||||
lastKnownGood[locator.name] = locator.reference;
|
||||
|
||||
return await pmmUtils.installVersion(folderUtils.getInstallFolder(), locator, {
|
||||
spec: definition.ranges[range],
|
||||
});
|
||||
}
|
||||
await fs.promises.mkdir(path.dirname(lastKnownGoodFile), {recursive: true});
|
||||
await fs.promises.writeFile(lastKnownGoodFile, `${JSON.stringify(lastKnownGood, null, 2)}\n`);
|
||||
}
|
||||
|
||||
async resolveDescriptor(descriptor: Descriptor, {useCache = true}: {useCache?: boolean} = {}) {
|
||||
const definition = this.config.definitions[descriptor.name];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${descriptor.name}) isn't supported by this pmm build`);
|
||||
async ensurePackageManager(locator: Locator) {
|
||||
const definition = this.config.definitions[locator.name];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${locator.name}) isn't supported by this corepack build`);
|
||||
|
||||
// If a compatible version is already installed, no need to query one
|
||||
// from the remote listings
|
||||
const cachedVersion = await pmmUtils.findInstalledVersion(folderUtils.getInstallFolder(), descriptor);
|
||||
if (cachedVersion !== null && useCache)
|
||||
return {name: descriptor.name, reference: cachedVersion};
|
||||
const ranges = Object.keys(definition.ranges).reverse();
|
||||
const range = ranges.find(range => semver.satisfies(locator.reference, range));
|
||||
if (typeof range === `undefined`)
|
||||
throw new Error(`Assertion failed: Specified resolution (${locator.reference}) isn't supported by any of ${ranges.join(`, `)}`);
|
||||
|
||||
const candidateRangeDefinitions = Object.keys(definition.ranges).filter(range => {
|
||||
return semver.intersects(range, descriptor.range);
|
||||
});
|
||||
return await pmmUtils.installVersion(folderUtils.getInstallFolder(), locator, {
|
||||
spec: definition.ranges[range],
|
||||
});
|
||||
}
|
||||
|
||||
const tagResolutions = await Promise.all(candidateRangeDefinitions.map(async range => {
|
||||
return [range, await pmmUtils.fetchAvailableVersions(definition.ranges[range].tags)] as const;
|
||||
}));
|
||||
async resolveDescriptor(descriptor: Descriptor, {useCache = true}: {useCache?: boolean} = {}) {
|
||||
const definition = this.config.definitions[descriptor.name];
|
||||
if (typeof definition === `undefined`)
|
||||
throw new UsageError(`This package manager (${descriptor.name}) isn't supported by this corepack build`);
|
||||
|
||||
// If a version is available under multiple strategies (for example if
|
||||
// Yarn is published to both the v1 package and git), we only care
|
||||
// about the latest one
|
||||
const resolutionMap = new Map();
|
||||
for (const [range, resolutions] of tagResolutions)
|
||||
for (const entry of resolutions)
|
||||
resolutionMap.set(entry, range);
|
||||
// If a compatible version is already installed, no need to query one
|
||||
// from the remote listings
|
||||
const cachedVersion = await pmmUtils.findInstalledVersion(folderUtils.getInstallFolder(), descriptor);
|
||||
if (cachedVersion !== null && useCache)
|
||||
return {name: descriptor.name, reference: cachedVersion};
|
||||
|
||||
const candidates = [...resolutionMap.keys()];
|
||||
const maxSatisfying = semver.maxSatisfying(candidates, descriptor.range);
|
||||
if (maxSatisfying === null)
|
||||
return null;
|
||||
const candidateRangeDefinitions = Object.keys(definition.ranges).filter(range => {
|
||||
return semver.intersects(range, descriptor.range);
|
||||
});
|
||||
|
||||
return {name: descriptor.name, reference: maxSatisfying};
|
||||
}
|
||||
const tagResolutions = await Promise.all(candidateRangeDefinitions.map(async range => {
|
||||
return [range, await pmmUtils.fetchAvailableVersions(definition.ranges[range].tags)] as const;
|
||||
}));
|
||||
|
||||
// If a version is available under multiple strategies (for example if
|
||||
// Yarn is published to both the v1 package and git), we only care
|
||||
// about the latest one
|
||||
const resolutionMap = new Map();
|
||||
for (const [range, resolutions] of tagResolutions)
|
||||
for (const entry of resolutions)
|
||||
resolutionMap.set(entry, range);
|
||||
|
||||
const candidates = [...resolutionMap.keys()];
|
||||
const maxSatisfying = semver.maxSatisfying(candidates, descriptor.range);
|
||||
if (maxSatisfying === null)
|
||||
return null;
|
||||
|
||||
return {name: descriptor.name, reference: maxSatisfying};
|
||||
}
|
||||
|
||||
private getLastKnownGoodFile() {
|
||||
return path.join(folderUtils.getInstallFolder(), `lastKnownGood.json`);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,30 +1,64 @@
|
|||
import {Command} from 'clipanion';
|
||||
import path from 'path';
|
||||
import tar from 'tar';
|
||||
import {Command, UsageError} from 'clipanion';
|
||||
import path from 'path';
|
||||
import tar from 'tar';
|
||||
|
||||
import * as folderUtils from '../folderUtils';
|
||||
import {Context} from '../main';
|
||||
import * as folderUtils from '../folderUtils';
|
||||
import {Context} from '../main';
|
||||
import {SupportedPackageManagerSet, SupportedPackageManagers} from '../types';
|
||||
|
||||
export class HydrateCommand extends Command<Context> {
|
||||
static usage = Command.Usage({
|
||||
description: `Import a package manager into the cache`,
|
||||
details: `
|
||||
This command unpacks a package manager archive into the cache. The archive must have been generated by the \`pmm pack\` command - no other will work.
|
||||
static usage = Command.Usage({
|
||||
description: `Import a package manager into the cache`,
|
||||
details: `
|
||||
This command unpacks a package manager archive into the cache. The archive must have been generated by the \`corepack pack\` command - no other will work.
|
||||
`,
|
||||
examples: [[
|
||||
`Import a package manager in the cache`,
|
||||
`$0 hydrate pmm-yarn-2.2.2.tgz`,
|
||||
]],
|
||||
});
|
||||
examples: [[
|
||||
`Import a package manager in the cache`,
|
||||
`$0 hydrate corepack-yarn-2.2.2.tgz`,
|
||||
]],
|
||||
});
|
||||
|
||||
@Command.String()
|
||||
fileName!: string;
|
||||
@Command.String()
|
||||
fileName!: string;
|
||||
|
||||
@Command.Path(`hydrate`)
|
||||
async execute() {
|
||||
const installFolder = folderUtils.getInstallFolder();
|
||||
const fileName = path.resolve(this.context.cwd, this.fileName);
|
||||
@Command.Boolean(`--activate`)
|
||||
activate: boolean = false;
|
||||
|
||||
await tar.x({file: fileName, cwd: installFolder});
|
||||
}
|
||||
@Command.Path(`hydrate`)
|
||||
async execute() {
|
||||
const installFolder = folderUtils.getInstallFolder();
|
||||
const fileName = path.resolve(this.context.cwd, this.fileName);
|
||||
|
||||
const firstLevel = new Set();
|
||||
const secondLevel = new Set();
|
||||
|
||||
let hasShortEntries = false;
|
||||
|
||||
await tar.t({file: fileName, onentry: entry => {
|
||||
const segments = entry.header.path.split(/\//g);
|
||||
|
||||
if (segments.length < 3) {
|
||||
hasShortEntries = true;
|
||||
} else {
|
||||
firstLevel.add(segments[0]);
|
||||
secondLevel.add(segments[1]);
|
||||
}
|
||||
}});
|
||||
|
||||
if (hasShortEntries || firstLevel.size !== 1 || secondLevel.size !== 1)
|
||||
throw new UsageError(`Invalid archive format; did it get generated by 'corepack prepare'?`);
|
||||
|
||||
const name = [...firstLevel][0] as SupportedPackageManagers;
|
||||
const reference = [...secondLevel][0] as string;
|
||||
|
||||
if (!SupportedPackageManagerSet.has(name))
|
||||
throw new UsageError(`Unsupported package manager '${name}'`);
|
||||
|
||||
await tar.x({file: fileName, cwd: installFolder});
|
||||
|
||||
if (this.activate)
|
||||
await this.context.engine.activatePackageManager({name, reference});
|
||||
|
||||
this.context.stdout.write(`Hydrated ${name}@${reference}\n`);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,72 +0,0 @@
|
|||
import {Command, UsageError} from 'clipanion';
|
||||
import path from 'path';
|
||||
import tar from 'tar';
|
||||
|
||||
import {Context} from '../main'
|
||||
import * as folderUtils from '../folderUtils';
|
||||
import * as specUtils from '../specUtils';
|
||||
|
||||
export class PackCommand extends Command<Context> {
|
||||
static usage = Command.Usage({
|
||||
description: `Generate a package manager archive`,
|
||||
details: `
|
||||
This command generates an archive for the specified package manager, in a format suitable for later hydratation via the \`pmm hydrate\` command.
|
||||
|
||||
If run without parameter, it'll extract the package manager spec from the active project. Otherwise, an explicit spec string is required, that pmm will resolve before installing and packing.
|
||||
`,
|
||||
examples: [[
|
||||
`Generate an archive from the active project`,
|
||||
`$0 pack`,
|
||||
], [
|
||||
`Generate an archive from a specific Yarn version`,
|
||||
`$0 pack yarn@2.2.2`,
|
||||
]],
|
||||
});
|
||||
|
||||
@Command.String({required: false})
|
||||
spec?: string;
|
||||
|
||||
@Command.Boolean(`--json`)
|
||||
json: boolean = false;
|
||||
|
||||
@Command.Path(`pack`)
|
||||
async execute() {
|
||||
let spec;
|
||||
|
||||
if (typeof this.spec === `undefined`) {
|
||||
const lookup = await specUtils.loadSpec(this.context.cwd);
|
||||
switch (lookup.type) {
|
||||
case `NoProject`:
|
||||
throw new UsageError(`Couldn't find a project in the local directory - please explicit the package manager to pack, or run this command from a valid project`);
|
||||
|
||||
case `NoSpec`:
|
||||
throw new UsageError(`The local project doesn't feature a 'packageManager' field - please explicit the package manager to pack, or update the manifest to reference it`);
|
||||
|
||||
default: {
|
||||
spec = lookup.spec;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
spec = specUtils.parseSpec(this.spec);
|
||||
}
|
||||
|
||||
const resolved = await this.context.engine.resolveDescriptor(spec);
|
||||
if (resolved === null)
|
||||
throw new UsageError(`Failed to successfully resolve '${spec.range}' to a valid ${spec.name} release`);
|
||||
|
||||
const baseInstallFolder = folderUtils.getInstallFolder();
|
||||
const installFolder = await this.context.engine.ensurePackageManager(resolved);
|
||||
|
||||
const fileName = typeof this.spec !== `undefined`
|
||||
? path.join(this.context.cwd, `pmm-${resolved.name}-${resolved.reference}.tgz`)
|
||||
: path.join(this.context.cwd, `pmm-${resolved.name}.tgz`);
|
||||
|
||||
await tar.c({gzip: true, cwd: baseInstallFolder, file: fileName}, [path.relative(baseInstallFolder, installFolder)]);
|
||||
|
||||
if (this.json) {
|
||||
this.context.stdout.write(`${JSON.stringify(fileName)}\n`);
|
||||
} else {
|
||||
this.context.stdout.write(`Packed ${fileName}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
import {Command, UsageError} from 'clipanion';
|
||||
import path from 'path';
|
||||
import tar from 'tar';
|
||||
|
||||
import * as folderUtils from '../folderUtils';
|
||||
import {Context} from '../main';
|
||||
import * as specUtils from '../specUtils';
|
||||
import {Descriptor} from '../types';
|
||||
|
||||
export class PrepareCommand extends Command<Context> {
|
||||
static usage = Command.Usage({
|
||||
description: `Generate a package manager archive`,
|
||||
details: `
|
||||
This command generates an archive for the specified package manager, in a format suitable for later hydration via the \`corepack hydrate\` command.
|
||||
|
||||
If run without parameter, it'll extract the package manager spec from the active project. Otherwise, an explicit spec string is required, which Corepack will resolve before installing and packing.
|
||||
`,
|
||||
examples: [[
|
||||
`Generate an archive from the active project`,
|
||||
`$0 prepare`,
|
||||
], [
|
||||
`Generate an archive from a specific Yarn version`,
|
||||
`$0 prepare yarn@2.2.2`,
|
||||
]],
|
||||
});
|
||||
|
||||
@Command.String({required: false})
|
||||
spec?: string;
|
||||
|
||||
@Command.Boolean(`--cache-only`)
|
||||
cacheOnly: boolean = false;
|
||||
|
||||
@Command.Boolean(`--activate`)
|
||||
activate: boolean = false;
|
||||
|
||||
@Command.Boolean(`--all`)
|
||||
all: boolean = false;
|
||||
|
||||
@Command.Boolean(`--json`)
|
||||
json: boolean = false;
|
||||
|
||||
@Command.Path(`prepare`)
|
||||
async execute() {
|
||||
if (this.all && typeof this.spec !== `undefined`)
|
||||
throw new UsageError(`The --all option cannot be used along with an explicit package manager specification`);
|
||||
|
||||
const specs = this.all
|
||||
? await this.context.engine.getDefaultDescriptors()
|
||||
: [this.spec];
|
||||
|
||||
for (const request of specs) {
|
||||
let spec: Descriptor;
|
||||
|
||||
if (typeof request === `undefined`) {
|
||||
const lookup = await specUtils.loadSpec(this.context.cwd);
|
||||
switch (lookup.type) {
|
||||
case `NoProject`:
|
||||
throw new UsageError(`Couldn't find a project in the local directory - please specify the package manager to pack, or run this command from a valid project`);
|
||||
|
||||
case `NoSpec`:
|
||||
throw new UsageError(`The local project doesn't feature a 'packageManager' field - please specify the package manager to pack, or update the manifest to reference it`);
|
||||
|
||||
default: {
|
||||
spec = lookup.spec;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
spec = typeof request === `string`
|
||||
? specUtils.parseSpec(request, `CLI arguments`)
|
||||
: request;
|
||||
}
|
||||
|
||||
const resolved = await this.context.engine.resolveDescriptor(spec);
|
||||
if (resolved === null)
|
||||
throw new UsageError(`Failed to successfully resolve '${spec.range}' to a valid ${spec.name} release`);
|
||||
|
||||
const baseInstallFolder = folderUtils.getInstallFolder();
|
||||
const installFolder = await this.context.engine.ensurePackageManager(resolved);
|
||||
|
||||
if (this.activate)
|
||||
await this.context.engine.activatePackageManager(resolved);
|
||||
|
||||
if (this.cacheOnly)
|
||||
continue;
|
||||
|
||||
const fileName = typeof request !== `undefined`
|
||||
? path.join(this.context.cwd, `corepack-${resolved.name}-${resolved.reference}.tgz`)
|
||||
: path.join(this.context.cwd, `corepack-${resolved.name}.tgz`);
|
||||
|
||||
await tar.c({gzip: true, cwd: baseInstallFolder, file: fileName}, [path.relative(baseInstallFolder, installFolder)]);
|
||||
|
||||
if (this.json) {
|
||||
this.context.stdout.write(`${JSON.stringify(fileName)}\n`);
|
||||
} else {
|
||||
this.context.stdout.write(`Packed ${fileName}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,3 +1,3 @@
 import makeDebug from 'debug';

-export const log = makeDebug(`pmm`);
+export const log = makeDebug(`corepack`);
|
|
|
|||
|
|
@@ -3,26 +3,26 @@ import {homedir, tmpdir} from 'os';
 import {join} from 'path';

 export function getInstallFolder() {
-  return process.env.PMM_HOME ?? join(homedir(), `.node/pmm`);
+  return process.env.COREPACK_HOME ?? join(homedir(), `.node/corepack`);
 }

 export function getTemporaryFolder(target: string = tmpdir()) {
   mkdirSync(target, {recursive: true});

   while (true) {
     const rnd = Math.random() * 0x100000000;
     const hex = rnd.toString(16).padStart(8, `0`);
-    const path = join(target, `pmm-${process.pid}-${hex}`);
+    const path = join(target, `corepack-${process.pid}-${hex}`);

     try {
       mkdirSync(path);
       return path;
     } catch (error) {
       if (error.code === `EEXIST`) {
         continue;
       } else {
         throw error;
       }
     }
   }
 }
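A standalone sketch of the same collision-handling idea used by getTemporaryFolder, runnable on its own; the example- prefix and the flooring of the random value are illustrative deviations from the code above.

import {mkdirSync} from 'fs';
import {tmpdir} from 'os';
import {join} from 'path';

// Keep generating random suffixes until mkdir succeeds; EEXIST means the
// name is already taken, so try again, and any other error is rethrown.
function makeUniqueDir(base: string = tmpdir()): string {
  mkdirSync(base, {recursive: true});

  while (true) {
    const hex = Math.floor(Math.random() * 0x100000000).toString(16).padStart(8, `0`);
    const candidate = join(base, `example-${process.pid}-${hex}`);

    try {
      mkdirSync(candidate);
      return candidate;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code !== `EEXIST`) {
        throw error;
      }
    }
  }
}

console.log(makeUniqueDir());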
@@ -1,10 +1,10 @@
 import fs from 'fs';
 import {dirname, relative} from 'path';

 export async function mutex<T>(p: string, cb: () => Promise<T>) {
   return await cb();
 }

 export async function makeShim(target: string, path: string) {
   await fs.promises.symlink(relative(dirname(target), path), target, `file`);
 }
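To illustrate the layout makeShim produces, a self-contained sketch; every path below is invented, and symlink creation may need extra privileges on Windows.

import fs from 'fs';
import {dirname, relative} from 'path';

async function demo() {
  const real = `/tmp/corepack-demo/yarn/2.2.2/bin/yarn.js`;
  const shim = `/tmp/corepack-demo/yarn/2.2.2/.bin/yarn`;

  await fs.promises.mkdir(dirname(real), {recursive: true});
  await fs.promises.mkdir(dirname(shim), {recursive: true});
  await fs.promises.writeFile(real, `#!/usr/bin/env node\n`);

  // The link is *relative*, so the whole install folder can be moved or
  // packed into an archive without breaking the `.bin` entries.
  await fs.promises.symlink(relative(dirname(shim), real), shim, `file`);
  console.log(await fs.promises.readlink(shim)); // ../bin/yarn.js
}

demo();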
@@ -1,57 +1,57 @@
|
|||
import {UsageError} from 'clipanion';
|
||||
import {UsageError} from 'clipanion';
|
||||
import https, {RequestOptions} from 'https';
|
||||
import {IncomingMessage} from 'http';
|
||||
import {IncomingMessage} from 'http';
|
||||
|
||||
export function fetchUrlStream(url: string, options: RequestOptions = {}) {
|
||||
if (process.env.PMM_ENABLE_NETWORK === `0`)
|
||||
throw new UsageError(`Network access disabled by the environment; can't reach ${url}`);
|
||||
if (process.env.COREPACK_ENABLE_NETWORK === `0`)
|
||||
throw new UsageError(`Network access disabled by the environment; can't reach ${url}`);
|
||||
|
||||
return new Promise<IncomingMessage>((resolve, reject) => {
|
||||
const request = https.get(url, options, response => {
|
||||
const statusCode = response.statusCode ?? 500;
|
||||
if (!(statusCode >= 200 && statusCode < 300))
|
||||
return reject(new Error(`Server answered with HTTP ${statusCode}`));
|
||||
return new Promise<IncomingMessage>((resolve, reject) => {
|
||||
const request = https.get(url, options, response => {
|
||||
const statusCode = response.statusCode ?? 500;
|
||||
if (!(statusCode >= 200 && statusCode < 300))
|
||||
return reject(new Error(`Server answered with HTTP ${statusCode}`));
|
||||
|
||||
resolve(response);
|
||||
});
|
||||
|
||||
request.on(`error`, err => {
|
||||
reject(err);
|
||||
});
|
||||
return resolve(response);
|
||||
});
|
||||
|
||||
request.on(`error`, err => {
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function fetchAsBuffer(url: string, options?: RequestOptions) {
|
||||
const response = await fetchUrlStream(url, options);
|
||||
const response = await fetchUrlStream(url, options);
|
||||
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
const chunks: Buffer[] = [];
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
const chunks: Array<Buffer> = [];
|
||||
|
||||
response.on(`data`, chunk => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
|
||||
response.on(`error`, error => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
response.on(`end`, () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
response.on(`data`, chunk => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
|
||||
response.on(`error`, error => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
response.on(`end`, () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function fetchAsJson(url: string, options?: RequestOptions) {
|
||||
const buffer = await fetchAsBuffer(url, options);
|
||||
const asText = buffer.toString();
|
||||
const buffer = await fetchAsBuffer(url, options);
|
||||
const asText = buffer.toString();
|
||||
|
||||
try {
|
||||
return JSON.parse(asText);
|
||||
} catch (error) {
|
||||
const truncated = asText.length > 30
|
||||
? asText.slice(0, 30) + `...`
|
||||
: asText;
|
||||
try {
|
||||
return JSON.parse(asText);
|
||||
} catch (error) {
|
||||
const truncated = asText.length > 30
|
||||
? `${asText.slice(0, 30)}...`
|
||||
: asText;
|
||||
|
||||
throw new Error(`Couldn't parse JSON data: ${JSON.stringify(truncated)}`);
|
||||
}
|
||||
throw new Error(`Couldn't parse JSON data: ${JSON.stringify(truncated)}`);
|
||||
}
|
||||
}
|
||||
|
|
|
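A small usage sketch for the HTTP helpers above; the relative import path is an assumption, and the registry URL and Accept header mirror the npm tag resolution shown later in this diff.

import * as httpUtils from './httpUtils';

// Fetch the abbreviated npm metadata for a package and list its versions.
// Setting COREPACK_ENABLE_NETWORK=0 in the environment would make this
// throw a UsageError instead of reaching the network.
async function listVersions(pkg: string): Promise<Array<string>> {
  const data = await httpUtils.fetchAsJson(`https://registry.npmjs.org/${pkg}`, {
    headers: {[`Accept`]: `application/vnd.npm.install-v1+json`},
  });

  return Object.keys(data.versions);
}

listVersions(`yarn`).then(versions => console.log(versions.length));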
152 sources/main.ts
@@ -1,98 +1,98 @@
|
|||
import {BaseContext, Cli, Command, UsageError} from 'clipanion';
|
||||
|
||||
import {HydrateCommand} from './commands/Hydrate';
|
||||
import {PackCommand} from './commands/Pack';
|
||||
import {Engine} from './Engine';
|
||||
import * as miscUtils from './miscUtils';
|
||||
import * as pmmUtils from './pmmUtils';
|
||||
import * as specUtils from './specUtils';
|
||||
import {Locator, isSupportedPackageManager} from './types';
|
||||
import {Engine} from './Engine';
|
||||
import {HydrateCommand} from './commands/Hydrate';
|
||||
import {PrepareCommand} from './commands/Prepare';
|
||||
import * as miscUtils from './miscUtils';
|
||||
import * as pmmUtils from './pmmUtils';
|
||||
import * as specUtils from './specUtils';
|
||||
import {Locator, isSupportedPackageManager} from './types';
|
||||
|
||||
export type CustomContext = {cwd: string, engine: Engine};
|
||||
export type Context = BaseContext & CustomContext;
|
||||
|
||||
export async function main(argv: string[], context: CustomContext & Partial<Context>) {
|
||||
const firstArg = argv[0];
|
||||
export async function main(argv: Array<string>, context: CustomContext & Partial<Context>) {
|
||||
const firstArg = argv[0];
|
||||
|
||||
if (isSupportedPackageManager(firstArg)) {
|
||||
const packageManager = firstArg;
|
||||
const binaryName = argv[1];
|
||||
if (isSupportedPackageManager(firstArg)) {
|
||||
const packageManager = firstArg;
|
||||
const binaryName = argv[1];
|
||||
|
||||
// Note: we're playing a bit with Clipanion here, since instead of letting it
|
||||
// decide how to route the commands, we'll instead tweak the init settings
|
||||
// based on the arguments.
|
||||
const cli = new Cli<Context>({binaryName});
|
||||
const defaultVersion = context.engine.getDefaultVersion(firstArg);
|
||||
// Note: we're playing a bit with Clipanion here, since instead of letting it
|
||||
// decide how to route the commands, we'll instead tweak the init settings
|
||||
// based on the arguments.
|
||||
const cli = new Cli<Context>({binaryName});
|
||||
const defaultVersion = await context.engine.getDefaultVersion(firstArg);
|
||||
|
||||
const potentialLocator: Locator = {
|
||||
name: packageManager,
|
||||
reference: defaultVersion,
|
||||
};
|
||||
const potentialLocator: Locator = {
|
||||
name: packageManager,
|
||||
reference: defaultVersion,
|
||||
};
|
||||
|
||||
class BinaryCommand extends Command<Context> {
|
||||
public proxy: string[] = [];
|
||||
class BinaryCommand extends Command<Context> {
|
||||
public proxy: Array<string> = [];
|
||||
|
||||
async execute() {
|
||||
let descriptor;
|
||||
try {
|
||||
descriptor = await specUtils.findProjectSpec(this.context.cwd, potentialLocator);
|
||||
} catch (err) {
|
||||
if (err instanceof miscUtils.Cancellation) {
|
||||
return 1;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const resolved = await context.engine.resolveDescriptor(descriptor);
|
||||
if (resolved === null)
|
||||
throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
|
||||
|
||||
const installTarget = await context.engine.ensurePackageManager(resolved);
|
||||
const exitCode = await pmmUtils.runVersion(installTarget, resolved, binaryName, this.proxy, this.context);
|
||||
|
||||
return exitCode;
|
||||
}
|
||||
async execute() {
|
||||
let descriptor;
|
||||
try {
|
||||
descriptor = await specUtils.findProjectSpec(this.context.cwd, potentialLocator);
|
||||
} catch (err) {
|
||||
if (err instanceof miscUtils.Cancellation) {
|
||||
return 1;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
BinaryCommand.addPath();
|
||||
BinaryCommand.addOption(`proxy`, Command.Proxy());
|
||||
const resolved = await context.engine.resolveDescriptor(descriptor);
|
||||
if (resolved === null)
|
||||
throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
|
||||
|
||||
cli.register(BinaryCommand);
|
||||
const installTarget = await context.engine.ensurePackageManager(resolved);
|
||||
const exitCode = await pmmUtils.runVersion(installTarget, resolved, binaryName, this.proxy, this.context);
|
||||
|
||||
return await cli.run(argv.slice(2), {
|
||||
...Cli.defaultContext,
|
||||
...context,
|
||||
});
|
||||
} else {
|
||||
const cli = new Cli<Context>({binaryName: `pmm`});
|
||||
|
||||
cli.register(Command.Entries.Help as any);
|
||||
|
||||
cli.register(HydrateCommand);
|
||||
cli.register(PackCommand);
|
||||
|
||||
return await cli.run(argv, {
|
||||
...Cli.defaultContext,
|
||||
...context,
|
||||
});
|
||||
return exitCode;
|
||||
}
|
||||
}
|
||||
|
||||
BinaryCommand.addPath();
|
||||
BinaryCommand.addOption(`proxy`, Command.Proxy());
|
||||
|
||||
cli.register(BinaryCommand);
|
||||
|
||||
return await cli.run(argv.slice(2), {
|
||||
...Cli.defaultContext,
|
||||
...context,
|
||||
});
|
||||
} else {
|
||||
const cli = new Cli<Context>({binaryName: `corepack`});
|
||||
|
||||
cli.register(Command.Entries.Help as any);
|
||||
|
||||
cli.register(HydrateCommand);
|
||||
cli.register(PrepareCommand);
|
||||
|
||||
return await cli.run(argv, {
|
||||
...Cli.defaultContext,
|
||||
...context,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function runMain(argv: string[]) {
|
||||
main(argv, {
|
||||
cwd: process.cwd(),
|
||||
engine: new Engine(),
|
||||
}).then(exitCode => {
|
||||
process.exitCode = exitCode;
|
||||
}, err => {
|
||||
console.error(err.stack);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
export function runMain(argv: Array<string>) {
|
||||
main(argv, {
|
||||
cwd: process.cwd(),
|
||||
engine: new Engine(),
|
||||
}).then(exitCode => {
|
||||
process.exitCode = exitCode;
|
||||
}, err => {
|
||||
console.error(err.stack);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
}
|
||||
|
||||
declare const __non_webpack_require__: any;
|
||||
|
||||
if (typeof __non_webpack_require__ === `undefined` && process.mainModule === module) {
|
||||
runMain(process.argv.slice(2));
|
||||
}
|
||||
if (typeof __non_webpack_require__ === `undefined` && process.mainModule === module)
|
||||
runMain(process.argv.slice(2));
|
||||
|
||||
|
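A hedged sketch of the routing rule above, calling main() directly; the import paths assume the snippet sits next to the sources.

import {Engine} from './Engine';
import {main} from './main';

async function demo() {
  const base = {cwd: process.cwd(), engine: new Engine()};

  // argv[0] names a supported package manager, so this proxies to its `yarn`
  // binary - the equivalent of running `corepack yarn --version`.
  console.log(await main([`yarn`, `yarn`, `--version`], base));

  // No package manager name in front, so Clipanion routes to Corepack's own
  // commands - the equivalent of `corepack prepare --cache-only --all`.
  console.log(await main([`prepare`, `--cache-only`, `--all`], base));
}

demo().catch(err => {
  console.error(err.stack);
  process.exitCode = 1;
});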
|
|
|||
|
|
@@ -1,16 +1,16 @@
|
|||
import {execFile, StdioOptions, spawn} from 'child_process';
|
||||
import fs, { existsSync } from 'fs';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
import tar from 'tar';
|
||||
import {promisify} from 'util';
|
||||
import {execFile, StdioOptions, spawn} from 'child_process';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
import tar from 'tar';
|
||||
import {promisify} from 'util';
|
||||
|
||||
import * as debugUtils from './debugUtils';
|
||||
import * as fsUtils from './fsUtils';
|
||||
import * as folderUtils from './folderUtils';
|
||||
import * as httpUtils from './httpUtils';
|
||||
import * as debugUtils from './debugUtils';
|
||||
import * as folderUtils from './folderUtils';
|
||||
import * as fsUtils from './fsUtils';
|
||||
import * as httpUtils from './httpUtils';
|
||||
import {Context} from './main';
|
||||
import {TagSpec, Descriptor, Locator, PackageManagerSpec} from './types';
|
||||
import { Context } from './main';
|
||||
|
||||
const execFileP = promisify(execFile);
|
||||
|
||||
|
|
@@ -18,165 +18,165 @@ const NL_REGEXP = /\n/;
|
|||
const REFS_TAGS_REGEXP = /^[a-f0-9]+\trefs\/tags\/(.*)\^\{\}$/;
|
||||
|
||||
export async function fetchAvailableVersions(spec: TagSpec) {
|
||||
switch (spec.type) {
|
||||
case `npm`: {
|
||||
const data = await httpUtils.fetchAsJson(`https://registry.npmjs.org/${spec.package}`, {headers: {[`Accept`]: `application/vnd.npm.install-v1+json`}});
|
||||
return Object.keys(data.versions);
|
||||
} break;
|
||||
switch (spec.type) {
|
||||
case `npm`: {
|
||||
const data = await httpUtils.fetchAsJson(`https://registry.npmjs.org/${spec.package}`, {headers: {[`Accept`]: `application/vnd.npm.install-v1+json`}});
|
||||
return Object.keys(data.versions);
|
||||
} break;
|
||||
|
||||
case `git`: {
|
||||
const {stdout} = await execFileP(`git`, [`ls-remote`, `--tags`, spec.repository]);
|
||||
const lines = stdout.split(NL_REGEXP);
|
||||
case `git`: {
|
||||
const {stdout} = await execFileP(`git`, [`ls-remote`, `--tags`, spec.repository]);
|
||||
const lines = stdout.split(NL_REGEXP);
|
||||
|
||||
const regexp = new RegExp(`^${spec.pattern.replace(`{}`, `(.*)`)}$`);
|
||||
const regexp = new RegExp(`^${spec.pattern.replace(`{}`, `(.*)`)}$`);
|
||||
|
||||
const results = [];
|
||||
for (const line of lines) {
|
||||
const lv1 = line.match(REFS_TAGS_REGEXP);
|
||||
if (!lv1)
|
||||
continue;
|
||||
const results = [];
|
||||
for (const line of lines) {
|
||||
const lv1 = line.match(REFS_TAGS_REGEXP);
|
||||
if (!lv1)
|
||||
continue;
|
||||
|
||||
const lv2 = lv1[1].match(regexp);
|
||||
if (!lv2)
|
||||
continue;
|
||||
const lv2 = lv1[1].match(regexp);
|
||||
if (!lv2)
|
||||
continue;
|
||||
|
||||
results.push(lv2[1]);
|
||||
}
|
||||
results.push(lv2[1]);
|
||||
}
|
||||
|
||||
return results;
|
||||
} break;
|
||||
return results;
|
||||
} break;
|
||||
|
||||
default: {
|
||||
throw new Error(`Unsupported specification ${JSON.stringify(spec)}`);
|
||||
} break;
|
||||
}
|
||||
default: {
|
||||
throw new Error(`Unsupported specification ${JSON.stringify(spec)}`);
|
||||
} break;
|
||||
}
|
||||
}
|
||||
|
||||
export async function findInstalledVersion(installTarget: string, descriptor: Descriptor) {
|
||||
const installFolder = path.join(installTarget, descriptor.name);
|
||||
const installFolder = path.join(installTarget, descriptor.name);
|
||||
|
||||
let folderContent: string[];
|
||||
try {
|
||||
folderContent = await fs.promises.readdir(installFolder);
|
||||
} catch (error) {
|
||||
if (error.code === `ENOENT`) {
|
||||
folderContent = [];
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
let folderContent: Array<string>;
|
||||
try {
|
||||
folderContent = await fs.promises.readdir(installFolder);
|
||||
} catch (error) {
|
||||
if (error.code === `ENOENT`) {
|
||||
folderContent = [];
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
const candidateVersions: string[] = [];
|
||||
for (const entry of folderContent) {
|
||||
// Some dot-folders tend to pop inside directories, especially on OSX
|
||||
if (entry.startsWith(`.`))
|
||||
continue;
|
||||
const candidateVersions: Array<string> = [];
|
||||
for (const entry of folderContent) {
|
||||
// Some dot-folders tend to pop inside directories, especially on OSX
|
||||
if (entry.startsWith(`.`))
|
||||
continue;
|
||||
|
||||
candidateVersions.push(entry);
|
||||
}
|
||||
candidateVersions.push(entry);
|
||||
}
|
||||
|
||||
const bestMatch = semver.maxSatisfying(candidateVersions, descriptor.range);
|
||||
if (bestMatch === null)
|
||||
return null;
|
||||
const bestMatch = semver.maxSatisfying(candidateVersions, descriptor.range);
|
||||
if (bestMatch === null)
|
||||
return null;
|
||||
|
||||
return bestMatch;
|
||||
return bestMatch;
|
||||
}
|
||||
|
||||
export async function installVersion(installTarget: string, locator: Locator, {spec}: {spec: PackageManagerSpec}) {
|
||||
const installFolder = path.join(installTarget, locator.name, locator.reference);
|
||||
if (fs.existsSync(installFolder)) {
|
||||
debugUtils.log(`Reusing ${locator.name}@${locator.reference}`);
|
||||
return installFolder;
|
||||
const installFolder = path.join(installTarget, locator.name, locator.reference);
|
||||
if (fs.existsSync(installFolder)) {
|
||||
debugUtils.log(`Reusing ${locator.name}@${locator.reference}`);
|
||||
return installFolder;
|
||||
}
|
||||
|
||||
const url = spec.url.replace(`{}`, locator.reference);
|
||||
debugUtils.log(`Installing ${locator.name}@${locator.reference} from ${url}`);
|
||||
|
||||
return await fsUtils.mutex(installFolder, async () => {
|
||||
// Creating a temporary folder inside the install folder means that we
|
||||
// are sure it'll be in the same drive as the destination, so we can
|
||||
// just move it there atomically once we are done
|
||||
|
||||
const tmpFolder = folderUtils.getTemporaryFolder(installTarget);
|
||||
const stream = await httpUtils.fetchUrlStream(url);
|
||||
|
||||
const parsedUrl = new URL(url);
|
||||
const ext = path.posix.extname(parsedUrl.pathname);
|
||||
|
||||
let outputFile: string | null = null;
|
||||
|
||||
let sendTo: any;
|
||||
if (ext === `.tgz`) {
|
||||
sendTo = tar.x({strip: 1, cwd: tmpFolder});
|
||||
} else if (ext === `.js`) {
|
||||
outputFile = path.join(tmpFolder, path.posix.basename(parsedUrl.pathname));
|
||||
sendTo = fs.createWriteStream(outputFile);
|
||||
}
|
||||
|
||||
const url = spec.url.replace(`{}`, locator.reference);
|
||||
debugUtils.log(`Installing ${locator.name}@${locator.reference} from ${url}`);
|
||||
stream.pipe(sendTo);
|
||||
|
||||
return await fsUtils.mutex(installFolder, async () => {
|
||||
// Creating a temporary folder inside the install folder means that we
|
||||
// are sure it'll be in the same drive as the destination, so we can
|
||||
// just move it there atomically once we are done
|
||||
|
||||
const tmpFolder = folderUtils.getTemporaryFolder(installTarget);
|
||||
const stream = await httpUtils.fetchUrlStream(url);
|
||||
|
||||
const parsedUrl = new URL(url);
|
||||
const ext = path.posix.extname(parsedUrl.pathname);
|
||||
|
||||
let outputFile: string | null = null;
|
||||
|
||||
let sendTo: any;
|
||||
if (ext === `.tgz`) {
|
||||
sendTo = tar.x({strip: 1, cwd: tmpFolder});
|
||||
} else if (ext === `.js`) {
|
||||
outputFile = path.join(tmpFolder, path.posix.basename(parsedUrl.pathname));
|
||||
sendTo = fs.createWriteStream(outputFile);
|
||||
}
|
||||
|
||||
stream.pipe(sendTo);
|
||||
|
||||
await new Promise(resolve => {
|
||||
sendTo.on(`finish`, resolve);
|
||||
});
|
||||
|
||||
await fs.promises.mkdir(path.join(tmpFolder, `.bin`));
|
||||
|
||||
if (Array.isArray(spec.bin)) {
|
||||
if (outputFile !== null) {
|
||||
for (const name of spec.bin) {
|
||||
await fsUtils.makeShim(path.join(tmpFolder, `.bin`, name), outputFile);
|
||||
}
|
||||
} else {
|
||||
throw new Error(`Assertion failed`);
|
||||
}
|
||||
} else {
|
||||
for (const [name, dest] of Object.entries(spec.bin)) {
|
||||
fsUtils.makeShim(path.join(tmpFolder, `.bin`, name), path.join(tmpFolder, dest));
|
||||
}
|
||||
}
|
||||
|
||||
await fs.promises.mkdir(path.dirname(installFolder), {recursive: true});
|
||||
await fs.promises.rename(tmpFolder, installFolder);
|
||||
|
||||
debugUtils.log(`Install finished`);
|
||||
return installFolder;
|
||||
await new Promise(resolve => {
|
||||
sendTo.on(`finish`, resolve);
|
||||
});
|
||||
|
||||
await fs.promises.mkdir(path.join(tmpFolder, `.bin`));
|
||||
|
||||
if (Array.isArray(spec.bin)) {
|
||||
if (outputFile !== null) {
|
||||
for (const name of spec.bin) {
|
||||
await fsUtils.makeShim(path.join(tmpFolder, `.bin`, name), outputFile);
|
||||
}
|
||||
} else {
|
||||
throw new Error(`Assertion failed`);
|
||||
}
|
||||
} else {
|
||||
for (const [name, dest] of Object.entries(spec.bin)) {
|
||||
fsUtils.makeShim(path.join(tmpFolder, `.bin`, name), path.join(tmpFolder, dest));
|
||||
}
|
||||
}
|
||||
|
||||
await fs.promises.mkdir(path.dirname(installFolder), {recursive: true});
|
||||
await fs.promises.rename(tmpFolder, installFolder);
|
||||
|
||||
debugUtils.log(`Install finished`);
|
||||
return installFolder;
|
||||
});
|
||||
}
|
||||
|
||||
export async function runVersion(installTarget: string, locator: Locator, binName: string, args: string[], context: Context) {
|
||||
const binPath = path.join(installTarget, `.bin`, binName);
|
||||
export async function runVersion(installTarget: string, locator: Locator, binName: string, args: Array<string>, context: Context) {
|
||||
const binPath = path.join(installTarget, `.bin`, binName);
|
||||
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
process.on(`SIGINT`, () => {
|
||||
// We don't want to exit the process before the child, so we just
|
||||
// ignore SIGINT and wait for the regular exit to happen (the child
|
||||
// will receive SIGINT too since it's part of the same process grp)
|
||||
});
|
||||
|
||||
const stdio: StdioOptions = [`pipe`, `pipe`, `pipe`];
|
||||
|
||||
if (context.stdin === process.stdin)
|
||||
stdio[0] = `inherit`;
|
||||
if (context.stdout === process.stdout)
|
||||
stdio[1] = `inherit`;
|
||||
if (context.stderr === process.stderr)
|
||||
stdio[2] = `inherit`;
|
||||
|
||||
const sub = spawn(process.execPath, [binPath, ...args], {
|
||||
cwd: context.cwd,
|
||||
stdio,
|
||||
});
|
||||
|
||||
if (context.stdin !== process.stdin)
|
||||
context.stdin.pipe(sub.stdin!);
|
||||
if (context.stdout !== process.stdout)
|
||||
sub.stdout!.pipe(context.stdout);
|
||||
if (context.stderr !== process.stderr)
|
||||
sub.stderr!.pipe(context.stderr);
|
||||
|
||||
sub.on(`exit`, exitCode => {
|
||||
resolve(exitCode !== null ? exitCode : 1);
|
||||
});
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
process.on(`SIGINT`, () => {
|
||||
// We don't want to exit the process before the child, so we just
|
||||
// ignore SIGINT and wait for the regular exit to happen (the child
|
||||
// will receive SIGINT too since it's part of the same process grp)
|
||||
});
|
||||
|
||||
const stdio: StdioOptions = [`pipe`, `pipe`, `pipe`];
|
||||
|
||||
if (context.stdin === process.stdin)
|
||||
stdio[0] = `inherit`;
|
||||
if (context.stdout === process.stdout)
|
||||
stdio[1] = `inherit`;
|
||||
if (context.stderr === process.stderr)
|
||||
stdio[2] = `inherit`;
|
||||
|
||||
const sub = spawn(process.execPath, [binPath, ...args], {
|
||||
cwd: context.cwd,
|
||||
stdio,
|
||||
});
|
||||
|
||||
if (context.stdin !== process.stdin)
|
||||
context.stdin.pipe(sub.stdin!);
|
||||
if (context.stdout !== process.stdout)
|
||||
sub.stdout!.pipe(context.stdout);
|
||||
if (context.stderr !== process.stderr)
|
||||
sub.stderr!.pipe(context.stderr);
|
||||
|
||||
sub.on(`exit`, exitCode => {
|
||||
resolve(exitCode !== null ? exitCode : 1);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
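A toy illustration of the selection rule findInstalledVersion relies on; the installed version list is made up.

import semver from 'semver';

// Among locally installed versions, the highest one satisfying the requested
// range wins; prereleases are only picked when the range explicitly asks for them.
const installed = [`1.22.4`, `2.0.0-rc.30`, `2.2.2`];

console.log(semver.maxSatisfying(installed, `^2.2.0`)); // 2.2.2
console.log(semver.maxSatisfying(installed, `1.x`));    // 1.22.4
console.log(semver.maxSatisfying(installed, `^3.0.0`)); // null, so the caller would fall back to installing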
|||
|
|
@@ -1,27 +1,27 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
import { UsageError } from 'clipanion';
|
||||
import Enquirer from 'enquirer';
|
||||
import {UsageError} from 'clipanion';
|
||||
import Enquirer from 'enquirer';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
|
||||
import * as miscUtils from './miscUtils';
|
||||
import * as miscUtils from './miscUtils';
|
||||
import {SupportedPackageManagers, SupportedPackageManagerSet, Descriptor, Locator} from './types';
|
||||
|
||||
export function parseSpec(raw: unknown, source?: string): Descriptor {
|
||||
if (typeof raw !== `string`)
|
||||
throw new UsageError(`Invalid package manager specification in ${source}; expected a semver range`);
|
||||
if (typeof raw !== `string`)
|
||||
throw new UsageError(`Invalid package manager specification in ${source}; expected a string`);
|
||||
|
||||
const match = raw.match(/^(?!_)(.+)@(.+)$/);
|
||||
if (match === null || !semver.validRange(match[2]))
|
||||
throw new UsageError(`Invalid package manager specification in ${source}; expected a semver range`);
|
||||
const match = raw.match(/^(?!_)(.+)@(.+)$/);
|
||||
if (match === null || !semver.validRange(match[2]))
|
||||
throw new UsageError(`Invalid package manager specification in ${source}; expected a semver range`);
|
||||
|
||||
if (!SupportedPackageManagerSet.has(match[1]))
|
||||
throw new UsageError(`Unsupported package manager specification (${match})`);
|
||||
if (!SupportedPackageManagerSet.has(match[1]))
|
||||
throw new UsageError(`Unsupported package manager specification (${match})`);
|
||||
|
||||
return {
|
||||
name: match[1] as SupportedPackageManagers,
|
||||
range: match[2],
|
||||
};
|
||||
return {
|
||||
name: match[1] as SupportedPackageManagers,
|
||||
range: match[2],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -41,28 +41,28 @@ export function parseSpec(raw: unknown, source?: string): Descriptor {
|
|||
* don't need to ask again in the future.
|
||||
*/
|
||||
export async function findProjectSpec(initialCwd: string, locator: Locator): Promise<Descriptor> {
|
||||
while (true) {
|
||||
const result = await loadSpec(initialCwd);
|
||||
while (true) {
|
||||
const result = await loadSpec(initialCwd);
|
||||
|
||||
switch (result.type) {
|
||||
case `NoProject`: {
|
||||
await initProjectAndSpec(result.target, locator);
|
||||
} break;
|
||||
switch (result.type) {
|
||||
case `NoProject`: {
|
||||
await initProjectAndSpec(result.target, locator);
|
||||
} break;
|
||||
|
||||
case `NoSpec`: {
|
||||
// A locator is a valid descriptor (but not the other way around)
|
||||
return {name: locator.name, range: locator.reference};
|
||||
} break;
|
||||
case `NoSpec`: {
|
||||
// A locator is a valid descriptor (but not the other way around)
|
||||
return {name: locator.name, range: locator.reference};
|
||||
} break;
|
||||
|
||||
case `Found`: {
|
||||
if (result.spec.name !== locator.name) {
|
||||
throw new UsageError(`This project is configured to use ${result.spec.name}`);
|
||||
} else {
|
||||
return result.spec;
|
||||
}
|
||||
} break;
|
||||
case `Found`: {
|
||||
if (result.spec.name !== locator.name) {
|
||||
throw new UsageError(`This project is configured to use ${result.spec.name}`);
|
||||
} else {
|
||||
return result.spec;
|
||||
}
|
||||
} break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type LoadSpecResult =
|
||||
|
|
@@ -71,82 +71,82 @@ export type LoadSpecResult =
|
|||
| {type: `Found`, spec: Descriptor};
|
||||
|
||||
export async function loadSpec(initialCwd: string): Promise<LoadSpecResult> {
|
||||
let nextCwd = initialCwd;
|
||||
let currCwd = ``;
|
||||
let nextCwd = initialCwd;
|
||||
let currCwd = ``;
|
||||
|
||||
let selection: any = null;
|
||||
let selection: any = null;
|
||||
|
||||
while (nextCwd !== currCwd && selection === null) {
|
||||
currCwd = nextCwd;
|
||||
nextCwd = path.dirname(currCwd);
|
||||
while (nextCwd !== currCwd && selection === null) {
|
||||
currCwd = nextCwd;
|
||||
nextCwd = path.dirname(currCwd);
|
||||
|
||||
const manifestPath = path.join(currCwd, `package.json`);
|
||||
if (!fs.existsSync(manifestPath))
|
||||
continue;
|
||||
const manifestPath = path.join(currCwd, `package.json`);
|
||||
if (!fs.existsSync(manifestPath))
|
||||
continue;
|
||||
|
||||
const content = await fs.promises.readFile(manifestPath, `utf8`);
|
||||
const content = await fs.promises.readFile(manifestPath, `utf8`);
|
||||
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(content);
|
||||
} catch {}
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(content);
|
||||
} catch {}
|
||||
|
||||
if (typeof data !== `object` || data === null)
|
||||
throw new UsageError(`Invalid package.json in ${path.relative(initialCwd, manifestPath)}`);
|
||||
if (typeof data !== `object` || data === null)
|
||||
throw new UsageError(`Invalid package.json in ${path.relative(initialCwd, manifestPath)}`);
|
||||
|
||||
selection = {data, manifestPath};
|
||||
}
|
||||
selection = {data, manifestPath};
|
||||
}
|
||||
|
||||
if (selection === null)
|
||||
return {type: `NoProject`, target: path.join(initialCwd, `package.json`)};
|
||||
if (selection === null)
|
||||
return {type: `NoProject`, target: path.join(initialCwd, `package.json`)};
|
||||
|
||||
const rawPmSpec = selection.data.packageManager;
|
||||
if (typeof rawPmSpec === `undefined`)
|
||||
return {type: `NoSpec`, target: selection.manifestPath};
|
||||
const rawPmSpec = selection.data.packageManager;
|
||||
if (typeof rawPmSpec === `undefined`)
|
||||
return {type: `NoSpec`, target: selection.manifestPath};
|
||||
|
||||
return {
|
||||
type: `Found`,
|
||||
spec: parseSpec(rawPmSpec, path.relative(initialCwd, selection.manifestPath)),
|
||||
};
|
||||
return {
|
||||
type: `Found`,
|
||||
spec: parseSpec(rawPmSpec, path.relative(initialCwd, selection.manifestPath)),
|
||||
};
|
||||
}
|
||||
|
||||
export async function persistPmSpec(updateTarget: string, locator: Locator, message: string) {
|
||||
const newSpec = `${locator.name}@^${locator.reference}`;
|
||||
const newSpec = `${locator.name}@^${locator.reference}`;
|
||||
|
||||
let res: boolean;
|
||||
try {
|
||||
res = await Enquirer.prompt([{
|
||||
type: `confirm`,
|
||||
name: `confirm`,
|
||||
initial: true,
|
||||
message: message.replace(`{}`, newSpec),
|
||||
}]);
|
||||
} catch (err) {
|
||||
if (err === ``) {
|
||||
res = false;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
let res: boolean;
|
||||
try {
|
||||
res = await Enquirer.prompt([{
|
||||
type: `confirm`,
|
||||
name: `confirm`,
|
||||
initial: true,
|
||||
message: message.replace(`{}`, newSpec),
|
||||
}]);
|
||||
} catch (err) {
|
||||
if (err === ``) {
|
||||
res = false;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
if (!res)
|
||||
throw new miscUtils.Cancellation();
|
||||
if (!res)
|
||||
throw new miscUtils.Cancellation();
|
||||
|
||||
const content = fs.existsSync(updateTarget)
|
||||
? await fs.promises.readFile(updateTarget, `utf8`)
|
||||
: `{}`;
|
||||
const content = fs.existsSync(updateTarget)
|
||||
? await fs.promises.readFile(updateTarget, `utf8`)
|
||||
: `{}`;
|
||||
|
||||
const data = JSON.parse(content);
|
||||
data.packageManager = newSpec;
|
||||
const data = JSON.parse(content);
|
||||
data.packageManager = newSpec;
|
||||
|
||||
const serialized = JSON.stringify(data, null, 2);
|
||||
await fs.promises.writeFile(updateTarget, `${serialized}\n`);
|
||||
const serialized = JSON.stringify(data, null, 2);
|
||||
await fs.promises.writeFile(updateTarget, `${serialized}\n`);
|
||||
}
|
||||
|
||||
export async function initProjectAndSpec(updateTarget: string, locator: Locator) {
|
||||
return await persistPmSpec(updateTarget, locator, `No configured project yet; set it to {}?`);
|
||||
return await persistPmSpec(updateTarget, locator, `No configured project yet; set it to {}?`);
|
||||
}
|
||||
|
||||
export async function initSpec(updateTarget: string, locator: Locator) {
|
||||
return await persistPmSpec(updateTarget, locator, `No configured local package manager yet; set it to {}?`);
|
||||
return await persistPmSpec(updateTarget, locator, `No configured local package manager yet; set it to {}?`);
|
||||
}
|
||||
|
|
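For reference, a quick sketch of what parseSpec accepts and rejects; the relative import path is an assumption.

import {UsageError} from 'clipanion';

import {parseSpec} from './specUtils';

// A well-formed spec is `<name>@<semver range>` with a supported manager name.
console.log(parseSpec(`yarn@^2.2.2`, `example`)); // {name: `yarn`, range: `^2.2.2`}

// Anything else - missing range, unknown manager, invalid semver - throws.
try {
  parseSpec(`yarn@latest`, `example`);
} catch (error) {
  console.log(error instanceof UsageError); // true
}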
|
|||
|
|
@@ -1,29 +1,29 @@
|
|||
export type BinSpec = {[key: string]: string};
|
||||
export type BinList = string[];
|
||||
export type BinList = Array<string>;
|
||||
|
||||
export enum SupportedPackageManagers {
|
||||
Npm = `npm`,
|
||||
Pnpm = `pnpm`,
|
||||
Yarn = `yarn`,
|
||||
Npm = `npm`,
|
||||
Pnpm = `pnpm`,
|
||||
Yarn = `yarn`,
|
||||
}
|
||||
|
||||
export const SupportedPackageManagerSet = new Set<string>(
|
||||
Object.values(SupportedPackageManagers),
|
||||
Object.values(SupportedPackageManagers),
|
||||
);
|
||||
|
||||
export function isSupportedPackageManager(value: string): value is SupportedPackageManagers {
|
||||
return SupportedPackageManagerSet.has(value);
|
||||
return SupportedPackageManagerSet.has(value);
|
||||
}
|
||||
|
||||
export interface NpmTagSpec {
|
||||
type: `npm`;
|
||||
package: string;
|
||||
type: `npm`;
|
||||
package: string;
|
||||
}
|
||||
|
||||
export interface GitTagSpec {
|
||||
type: `git`;
|
||||
repository: string;
|
||||
pattern: string;
|
||||
type: `git`;
|
||||
repository: string;
|
||||
pattern: string;
|
||||
}
|
||||
|
||||
export type TagSpec =
|
||||
|
|
@@ -34,23 +34,24 @@ export type TagSpec =
|
|||
* Defines how the package manager is meant to be downloaded and accessed.
|
||||
*/
|
||||
export interface PackageManagerSpec {
|
||||
url: string;
|
||||
bin: BinSpec | BinList;
|
||||
tags: TagSpec;
|
||||
};
|
||||
url: string;
|
||||
bin: BinSpec | BinList;
|
||||
tags: TagSpec;
|
||||
}
|
||||
|
||||
/**
|
||||
* The data structure found in config.json
|
||||
*/
|
||||
export interface Config {
|
||||
definitions: {
|
||||
[name in SupportedPackageManagers]?: {
|
||||
default: string;
|
||||
ranges: {
|
||||
[range: string]: PackageManagerSpec;
|
||||
};
|
||||
};
|
||||
definitions: {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
[name in SupportedPackageManagers]?: {
|
||||
default: string;
|
||||
ranges: {
|
||||
[range: string]: PackageManagerSpec;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -58,28 +59,28 @@ export interface Config {
|
|||
* manager to use for the active project.
|
||||
*/
|
||||
export interface Descriptor {
|
||||
/**
|
||||
/**
|
||||
* The name of the package manager required.
|
||||
*/
|
||||
name: SupportedPackageManagers;
|
||||
name: SupportedPackageManagers;
|
||||
|
||||
/**
|
||||
/**
|
||||
* The range of versions allowed.
|
||||
*/
|
||||
range: string;
|
||||
range: string;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
export interface Locator {
|
||||
/**
|
||||
/**
|
||||
* The name of the package manager required.
|
||||
*/
|
||||
name: SupportedPackageManagers;
|
||||
name: SupportedPackageManagers;
|
||||
|
||||
/**
|
||||
/**
|
||||
* The exact version required.
|
||||
*/
|
||||
reference: string;
|
||||
reference: string;
|
||||
}
|
||||
|
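To make the Config shape concrete, a hypothetical excerpt; every URL, version, and range below is invented for illustration and does not come from the real config.json.

import {Config} from './types';

const example: Config = {
  definitions: {
    yarn: {
      default: `2.2.2`,
      ranges: {
        [`>=2.0.0`]: {
          // `{}` is substituted with the resolved reference at install time.
          url: `https://example.com/yarn/{}/yarn.js`,
          bin: [`yarn`, `yarnpkg`],
          tags: {type: `npm`, package: `@yarnpkg/cli`},
        },
      },
    },
  },
};

console.log(Object.keys(example.definitions));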
|
|
|||
|
|
@@ -1,36 +1,36 @@
|
|||
import {PortablePath, npath} from '@yarnpkg/fslib';
|
||||
import {PassThrough} from 'stream';
|
||||
|
||||
import {Engine} from '../sources/Engine';
|
||||
import {main} from '../sources/main';
|
||||
import {Engine} from '../sources/Engine';
|
||||
import {main} from '../sources/main';
|
||||
|
||||
export async function runCli(cwd: PortablePath, argv: string[]) {
|
||||
const stdin = new PassThrough();
|
||||
const stdout = new PassThrough();
|
||||
const stderr = new PassThrough();
|
||||
export async function runCli(cwd: PortablePath, argv: Array<string>) {
|
||||
const stdin = new PassThrough();
|
||||
const stdout = new PassThrough();
|
||||
const stderr = new PassThrough();
|
||||
|
||||
const out: Buffer[] = [];
|
||||
const err: Buffer[] = [];
|
||||
const out: Array<Buffer> = [];
|
||||
const err: Array<Buffer> = [];
|
||||
|
||||
stdout.on(`data`, chunk => {
|
||||
out.push(chunk);
|
||||
});
|
||||
stdout.on(`data`, chunk => {
|
||||
out.push(chunk);
|
||||
});
|
||||
|
||||
stderr.on(`data`, chunk => {
|
||||
err.push(chunk);
|
||||
});
|
||||
stderr.on(`data`, chunk => {
|
||||
err.push(chunk);
|
||||
});
|
||||
|
||||
const exitCode = await main(argv, {
|
||||
cwd: npath.fromPortablePath(cwd),
|
||||
engine: new Engine(),
|
||||
stdin,
|
||||
stdout,
|
||||
stderr,
|
||||
});
|
||||
const exitCode = await main(argv, {
|
||||
cwd: npath.fromPortablePath(cwd),
|
||||
engine: new Engine(),
|
||||
stdin,
|
||||
stdout,
|
||||
stderr,
|
||||
});
|
||||
|
||||
return {
|
||||
exitCode,
|
||||
stdout: Buffer.concat(out).toString(),
|
||||
stderr: Buffer.concat(err).toString(),
|
||||
};
|
||||
return {
|
||||
exitCode,
|
||||
stdout: Buffer.concat(out).toString(),
|
||||
stderr: Buffer.concat(err).toString(),
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,133 +1,195 @@
|
|||
import {Filename, ppath, xfs, PortablePath} from '@yarnpkg/fslib';
|
||||
import {Filename, ppath, xfs} from '@yarnpkg/fslib';
|
||||
import Enquirer from 'enquirer';
|
||||
|
||||
import config from '../config.json';
|
||||
|
||||
import {runCli} from './_runCli';
|
||||
|
||||
beforeEach(async () => {
|
||||
process.env.PMM_HOME = await xfs.mktempPromise();
|
||||
process.env.COREPACK_HOME = await xfs.mktempPromise();
|
||||
});
|
||||
|
||||
for (const [name, version] of [[`yarn`, `1.22.4`], [`yarn`, `2.0.0-rc.30`], [`pnpm`, `4.11.6`], [`npm`, `6.14.2`]]) {
|
||||
it(`should use the right package manager version for a given project (${name}@${version})`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `${name}@${version}`,
|
||||
});
|
||||
it(`should use the right package manager version for a given project (${name}@${version})`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `${name}@${version}`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [name, name, `--version`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
stdout: `${version}\n`,
|
||||
});
|
||||
});
|
||||
await expect(runCli(cwd, [name, name, `--version`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
stdout: `${version}\n`,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
it(`shouldn't allow to use Yarn for npm-configured projects`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `npm@6.14.2`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
exitCode: 1,
|
||||
});
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `npm@6.14.2`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
exitCode: 1,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it(`should request for the project to be configured if it doesn't exist`, async () => {
|
||||
// @ts-ignore
|
||||
const spy = jest.spyOn(Enquirer, `prompt`, `get`)
|
||||
// @ts-ignore
|
||||
.mockReturnValue(() => Promise.resolve(true));
|
||||
// @ts-ignore
|
||||
const spy = jest.spyOn(Enquirer, `prompt`, `get`)
|
||||
// @ts-ignore
|
||||
.mockReturnValue(() => Promise.resolve(true));
|
||||
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
await expect(xfs.readJsonPromise(ppath.join(cwd, `package.json` as Filename))).resolves.toEqual({
|
||||
packageManager: expect.stringMatching(/^yarn@/),
|
||||
});
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
await expect(xfs.readJsonPromise(ppath.join(cwd, `package.json` as Filename))).resolves.toEqual({
|
||||
packageManager: expect.stringMatching(/^yarn@/),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it(`should use the pinned version when local projects don't list any spec`, async () => {
|
||||
// Note that we don't prevent using any package manager. This ensures that
|
||||
// projects will receive as little disruption as possible (for example, we
|
||||
// don't prompt to set the packageManager field).
|
||||
// Note that we don't prevent using any package manager. This ensures that
|
||||
// projects will receive as little disruption as possible (for example, we
|
||||
// don't prompt to set the packageManager field).
|
||||
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
// empty package.json file
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.yarn.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`pnpm`, `pnpm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.pnpm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`npm`, `npm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.npm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
// empty package.json file
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.yarn.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`pnpm`, `pnpm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.pnpm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`npm`, `npm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.npm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it(`should allow updating the pinned version using the "prepare" command`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await runCli(cwd, [`prepare`, `--cache-only`, `--activate`, `yarn@1.0.0`]);
|
||||
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
// empty package.json file
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `1.0.0\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it(`should allow to call "prepare" without arguments within a configured project`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `yarn@1.0.0`,
|
||||
});
|
||||
|
||||
await runCli(cwd, [`prepare`, `--cache-only`, `--activate`]);
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `1.0.0\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it(`should allow to call "prepare" with --all to prepare all package managers`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
// empty package.json file
|
||||
});
|
||||
|
||||
await runCli(cwd, [`prepare`, `--cache-only`, `--all`]);
|
||||
|
||||
process.env.COREPACK_ENABLE_NETWORK = `0`;
|
||||
|
||||
try {
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.yarn.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`pnpm`, `pnpm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.pnpm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`npm`, `npm`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `${config.definitions.npm.default}\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
} finally {
|
||||
delete process.env.COREPACK_ENABLE_NETWORK;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it(`should support disabling the network accesses from the environment`, async () => {
|
||||
process.env.PMM_ENABLE_NETWORK = `0`;
|
||||
process.env.COREPACK_ENABLE_NETWORK = `0`;
|
||||
|
||||
try {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `yarn@2.2.2`,
|
||||
});
|
||||
try {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `yarn@2.2.2`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: expect.stringContaining(`Network access disabled by the environment`),
|
||||
exitCode: 1,
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
delete process.env.PMM_ENABLE_NETWORK;
|
||||
}
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: expect.stringContaining(`Network access disabled by the environment`),
|
||||
exitCode: 1,
|
||||
});
|
||||
});
|
||||
} finally {
|
||||
delete process.env.COREPACK_ENABLE_NETWORK;
|
||||
}
|
||||
});
|
||||
|
||||
it(`should support hydrating package managers from cached archives`, async () => {
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await expect(runCli(cwd, [`pack`, `yarn@2.2.2`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
// Use a new cache
|
||||
process.env.PMM_HOME = await xfs.mktempPromise();
|
||||
|
||||
// Disable the network to make sure we don't succeed by accident
|
||||
process.env.PMM_ENABLE_NETWORK = `0`;
|
||||
|
||||
try {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `yarn@2.2.2`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`hydrate`, `pmm-yarn-2.2.2.tgz`])).resolves.toMatchObject({
|
||||
stdout: ``,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `2.2.2\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
} finally {
|
||||
delete process.env.PMM_ENABLE_NETWORK;
|
||||
}
|
||||
await xfs.mktempPromise(async cwd => {
|
||||
await expect(runCli(cwd, [`prepare`, `yarn@2.2.2`])).resolves.toMatchObject({
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
// Use a new cache
|
||||
process.env.COREPACK_HOME = await xfs.mktempPromise();
|
||||
|
||||
// Disable the network to make sure we don't succeed by accident
|
||||
process.env.COREPACK_ENABLE_NETWORK = `0`;
|
||||
|
||||
try {
|
||||
await xfs.writeJsonPromise(ppath.join(cwd, `package.json` as Filename), {
|
||||
packageManager: `yarn@2.2.2`,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`hydrate`, `corepack-yarn-2.2.2.tgz`])).resolves.toMatchObject({
|
||||
stdout: `Hydrated yarn@2.2.2\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
await expect(runCli(cwd, [`yarn`, `yarn`, `--version`])).resolves.toMatchObject({
|
||||
stdout: `2.2.2\n`,
|
||||
exitCode: 0,
|
||||
});
|
||||
} finally {
|
||||
delete process.env.COREPACK_ENABLE_NETWORK;
|
||||
}
|
||||
});
|
||||
});
|
||||