Compare commits


No commits in common. "main" and "v1.4.0-beta.2" have entirely different histories.

1081 changed files with 27558 additions and 178499 deletions

.DS_Store vendored Normal file

Binary file not shown.

.babelrc Normal file

@@ -0,0 +1,15 @@
{
"presets": [
"@babel/preset-env",
"@babel/preset-typescript",
"@babel/preset-react"
],
"plugins": [
[
"@babel/plugin-proposal-decorators",
{
"legacy": true
}
]
]
}
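
For orientation, the Babel config above compiles TypeScript/React sources and turns on the legacy decorator transform. A minimal sketch of the kind of code that `@babel/plugin-proposal-decorators` with `legacy: true` accepts — the decorator and class below are illustrative and not taken from this repository:

```typescript
// Hypothetical class decorator enabled by the legacy decorator proposal.
function logCreation<T extends { new (...args: any[]): {} }>(target: T): T {
  return class extends target {
    constructor(...args: any[]) {
      super(...args);
      console.log(`created ${target.name}`);
    }
  };
}

@logCreation
class ApplicationStore {
  applications: string[] = [];
}

new ApplicationStore(); // logs "created ApplicationStore"
```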

@@ -1 +1 @@
**/node_modules/
node_modules

@@ -1,10 +0,0 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.{js,json,yml}]
charset = utf-8
indent_style = space
indent_size = 2

@@ -5,12 +5,8 @@ coverage/
node_modules/
packages/velaux-ui/src/assets/
src/assets/
.DS_Store
public
*.d.ts
dist
pkg/
*.d.ts

@@ -1,56 +1,8 @@
module.exports = {
extends: ['@grafana/eslint-config'],
root: true,
plugins: ['@emotion', 'lodash', 'jest', 'import', 'jsx-a11y'],
extends: [require.resolve('@umijs/fabric/dist/eslint')],
globals: {
page: true,
__COMMIT_HASH__: 'readonly',
},
rules: {
eqeqeq: 'off',
'react/prop-types': 'off',
// need to ignore emotion's `css` prop, see https://github.com/jsx-eslint/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md#rule-options
// 'react/no-unknown-property': ['error', { ignore: ['css'] }],
// 'import/order': [
// 'error',
// {
// groups: [['builtin', 'external'], 'internal', 'parent', 'sibling', 'index'],
// 'newlines-between': 'always',
// alphabetize: { order: 'asc' },
// },
// ],
'import/order': 'off',
'react/jsx-key': 'off',
'no-restricted-imports': [
'error',
{
paths: [
{
name: 'react-redux',
importNames: ['useDispatch', 'useSelector'],
message: 'Please import from app/types instead.',
},
{
name: 'react-i18next',
importNames: ['Trans', 't'],
message: 'Please import from app/core/internationalization instead',
},
],
},
],
'no-duplicate-imports': 'off',
"import/no-duplicates": "warn",
'react/no-deprecated': 'off',
'react/no-unknown-property': 'off',
'@typescript-eslint/explicit-member-accessibility': 'off',
'@typescript-eslint/array-type': 'off',
},
overrides: [
{
files: ['packages/velaux-ui/src/utils/common.ts'],
rules: {
'no-redeclare': 'off',
},
},
],
rules: {},
};
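
For context on the larger config in this hunk: the `no-restricted-imports` block steers code toward app-local wrappers, and `__COMMIT_HASH__` is registered as a read-only global. A hedged sketch of what the rule flags and what it expects instead — only the module paths come from the rule messages above; the exact exports of those app-local modules are assumptions:

```typescript
// Flagged by the rule above (direct imports are restricted):
// import { useDispatch, useSelector } from 'react-redux';
// import { Trans, t } from 'react-i18next';

// What the rule messages ask for instead (assumed to re-export typed wrappers):
import { useDispatch, useSelector } from 'app/types';
import { t } from 'app/core/internationalization';

export { useDispatch, useSelector };

// The `__COMMIT_HASH__` global from the config is usually injected at build
// time; an ambient declaration keeps TypeScript happy in source files.
declare const __COMMIT_HASH__: string;

export function buildLabel(): string {
  return `${t('Build')}: ${__COMMIT_HASH__}`;
}
```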

.gitattributes vendored

@@ -1,4 +0,0 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated

.github/CODEOWNERS vendored

@@ -1,7 +1,5 @@
# This file is a github code protect rule follow the codeowners https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/creating-a-repository-on-github/about-code-owners#example-of-a-codeowners-file
* @barnettZQG @wonderflow @chivalryq
* @barnettZQG @wonderflow
pkg @barnettZQG @chivalryq @wangyikewxgm @yangsoon @FogDong
packages @barnettZQG @chivalryq
src/ @wangbow

@@ -14,8 +14,7 @@ I have:
- [ ] Read and followed KubeVela's [contribution process](https://github.com/oam-dev/kubevela/blob/master/contribute/create-pull-request.md).
- [ ] [Related Docs](https://github.com/oam-dev/kubevela.io) updated properly. In a new feature or configuration option, an update to the documentation is necessary.
- [ ] Run `yarn lint` to ensure the frontend changes are ready for review.
- [ ] Run `make reviewable` to ensure the server changes are ready for review.
- [ ] Run `yarn lint` to ensure this PR is ready for review.
- [ ] Added `backport release-x.y` labels to auto-backport this PR if necessary.
### Special notes for your reviewer

@@ -1,11 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"

@@ -1,64 +0,0 @@
name: Arm64 Build Test
on:
push:
branches:
- main
- release-*
tags:
- v*
workflow_dispatch: { }
pull_request:
branches:
- main
- release-*
env:
# Common versions
GO_VERSION: '1.22.0'
permissions:
contents: read
jobs:
detect-noop:
runs-on: ubuntu-22.04
outputs:
noop: ${{ steps.noop.outputs.should_skip }}
steps:
- name: Detect No-op Changes
id: noop
uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
paths_ignore: '["**.md", "**.mdx", "**.png", "**.jpg"]'
do_not_skip: '["workflow_dispatch", "schedule", "push"]'
continue-on-error: true
arm64-build-test:
runs-on: ubuntu-22.04
needs: detect-noop
if: needs.detect-noop.outputs.noop != 'true'
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
with:
submodules: true
- name: Set up QEMU
uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4
- name: Build linux/arm64 image
id: docker_build_2
uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
with:
context: ./
build-args: |
GOPROXY=https://proxy.golang.org
file: ./Dockerfile
platforms: linux/arm64
push: false
tags: oamdev/velaux:latest

@@ -7,16 +7,16 @@ on:
jobs:
# align with crossplane's choice https://github.com/crossplane/crossplane/blob/master/.github/workflows/backport.yml
open-pr:
runs-on: ubuntu-22.04
runs-on: ubuntu-20.04
if: github.event.pull_request.merged
steps:
- name: Checkout
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Open Backport PR
uses: zeebe-io/backport-action@v0.0.9
uses: zeebe-io/backport-action@v0.0.6
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
github_workspace: ${{ github.workspace }}

.github/workflows/ci.yaml vendored Normal file

@@ -0,0 +1,48 @@
name: staticcheck
on:
push:
branches:
- main
- release-*
workflow_dispatch: {}
pull_request:
branches:
- main
- release-*
jobs:
detect-noop:
runs-on: ubuntu-20.04
outputs:
noop: ${{ steps.noop.outputs.should_skip }}
steps:
- name: Detect No-op Changes
id: noop
uses: fkirc/skip-duplicate-actions@v3.3.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
paths_ignore: '["**.md", "**.mdx", "**.png", "**.jpg"]'
do_not_skip: '["workflow_dispatch", "schedule", "push"]'
concurrent_skipping: false
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '14'
- run: yarn install
- run: yarn lint
- run: yarn tsc
- run: yarn test
- name: Build docker image
id: docker_build
uses: docker/build-push-action@v2
with:
context: ./
file: ./Dockerfile
push: false
tags: oamdev/velaux:1.2.0

@@ -1,41 +0,0 @@
name: "CodeQL"
on:
push:
branches: [ "main", "release-1.2", "release-1.3", "release-1.4", "release-1.5", "release-1.6" ]
pull_request:
branches: [ "main" ]
schedule:
- cron: "40 0 * * 5"
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ go, javascript ]
steps:
- name: Checkout
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
queries: +security-and-quality
- name: Autobuild
uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{ matrix.language }}"

@@ -11,11 +11,9 @@ on:
jobs:
check:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: thehanimo/pr-title-checker@v1.4.2
- uses: thehanimo/pr-title-checker@v1.3.1
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
pass_on_octokit_error: true
configuration_path: '.github/pr-title-checker-config.json'
configuration_path: ".github/pr-title-checker-config.json"

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
uses: actions/checkout@v2
- name: Build Vela Core image from Dockerfile
run: |
@@ -23,7 +23,7 @@ jobs:
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
uses: github/codeql-action/upload-sarif@v1
if: always()
with:
sarif_file: 'trivy-results.sarif'
sarif_file: 'trivy-results.sarif'

@@ -5,53 +5,23 @@ on:
issue_comment:
types: [created]
permissions:
contents: read
jobs:
bot:
runs-on: ubuntu-22.04
steps:
- name: Checkout Actions
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
with:
repository: "oam-dev/kubevela-github-actions"
path: ./actions
ref: v0.4.2
- name: Setup Node.js
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: ./actions/package-lock.json
- name: Install Dependencies
run: npm ci --production --prefix ./actions
- name: Run Commands
uses: ./actions/commands
with:
token: ${{secrets.VELA_BOT_TOKEN}}
configPath: issue-commands
backport:
runs-on: ubuntu-22.04
runs-on: ubuntu-18.04
if: github.event.issue.pull_request && contains(github.event.comment.body, '/backport')
permissions:
contents: write
pull-requests: write
issues: write
steps:
- name: Extract Command
id: command
uses: xt0rted/slash-command-action@bf51f8f5f4ea3d58abc7eca58f77104182b23e88
uses: xt0rted/slash-command-action@v1
with:
repo-token: ${{ secrets.VELA_BOT_TOKEN }}
repo-token: ${{ secrets.GITHUB_TOKEN }}
command: backport
reaction: "true"
reaction-type: "eyes"
allow-edits: "false"
permission-level: read
- name: Handle Command
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea
uses: actions/github-script@v4
env:
VERSION: ${{ steps.command.outputs.command-arguments }}
with:
@@ -63,7 +33,7 @@ jobs:
label = "backport " + version
}
// Add our backport label.
github.rest.issues.addLabels({
github.issues.addLabels({
// Every pull request is an issue, but not every issue is a pull request.
issue_number: context.issue.number,
owner: context.repo.owner,
@@ -72,70 +42,11 @@ jobs:
})
console.log("Added '" + label + "' label.")
- name: Checkout
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Open Backport PR
uses: zeebe-io/backport-action@v0.0.9
uses: zeebe-io/backport-action@v0.0.6
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
github_workspace: ${{ github.workspace }}
retest:
runs-on: ubuntu-22.04
if: github.event.issue.pull_request && contains(github.event.comment.body, '/retest')
permissions:
actions: write
contents: write
pull-requests: write
issues: write
steps:
- name: Retest the current pull request
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea
env:
PULL_REQUEST_ID: ${{ github.event.issue.number }}
COMMENT_ID: ${{ github.event.comment.id }}
COMMENT_BODY: ${{ github.event.comment.body }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const pull_request_id = process.env.PULL_REQUEST_ID
const comment_id = process.env.COMMENT_ID
const comment_body = process.env.COMMENT_BODY
console.log("retest pr: #" + pull_request_id + " comment: " + comment_body)
const {data: pr} = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: pull_request_id,
})
console.log("pr: " + JSON.stringify(pr))
const action = comment_body.split(" ")[0]
let workflow_ids = comment_body.split(" ").slice(1).filter(line => line.length > 0).map(line => line + ".yml")
if (workflow_ids.length == 0) workflow_ids = ["staticcheck.yml", "server-test.yml", "arm64-build-test.yml"]
for (let i = 0; i < workflow_ids.length; i++) {
const workflow_id = workflow_ids[i]
const {data: runs} = await github.rest.actions.listWorkflowRuns({
owner: context.repo.owner,
repo: context.repo.repo,
workflow_id: workflow_id,
head_sha: pr.head.sha,
})
console.log("runs for " + workflow_id + ": ", JSON.stringify(runs))
runs.workflow_runs.forEach((workflow_run) => {
if (workflow_run.status === "in_progress") return
let handler = github.rest.actions.reRunWorkflow
if (action === "/retest-failed") handler = github.rest.actions.reRunWorkflowFailedJobs
handler({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: workflow_run.id
})
})
}
github.rest.reactions.createForIssueComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: comment_id,
content: "eyes",
});
github_workspace: ${{ github.workspace }}

@@ -10,7 +10,7 @@ jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
- uses: actions/checkout@v2
- name: Get the version
id: get_version
run: |
@@ -19,32 +19,27 @@ jobs:
VERSION=latest
fi
echo ::set-output name=VERSION::${VERSION}
- name: Get git revision
id: vars
shell: bash
run: |
echo "git_revision=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Login to DockerHub
uses: docker/login-action@v3.2.0
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- uses: docker/setup-qemu-action@v3.1.0
- uses: docker/setup-buildx-action@v3.4.0
- name: Login to Aliyun Container Registry (ACR)
uses: docker/login-action@v1
with:
driver-opts: image=moby/buildkit:master
registry: acr.kubevela.net
username: "${{ secrets.ACR_USERNAME }}"
password: "${{ secrets.ACR_PASSWORD }}"
- name: Build docker image
id: acr_build
uses: docker/build-push-action@v6.3.0
uses: docker/build-push-action@v2
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
build-args: |
GITVERSION=git-${{ steps.vars.outputs.git_revision }}
VERSION=${{ steps.get_version.outputs.VERSION }}
GOPROXY=https://proxy.golang.org
tags: |-
acr.kubevela.net/oamdev/velaux:${{ steps.get_version.outputs.VERSION }}
oamdev/velaux:${{ steps.get_version.outputs.VERSION }}

@@ -1,204 +0,0 @@
name: VelaUX APIServer Test
on:
push:
branches:
- main
- release-*
tags:
- v*
workflow_dispatch: { }
pull_request:
branches:
- main
- release-*
env:
# Common versions
GO_VERSION: '1.22.0'
permissions:
contents: read
jobs:
detect-noop:
runs-on: ubuntu-22.04
outputs:
noop: ${{ steps.noop.outputs.should_skip }}
steps:
- name: Detect No-op Changes
id: noop
uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
paths_ignore: '["**.md", "**.mdx", "**.png", "**.jpg"]'
do_not_skip: '["workflow_dispatch", "schedule", "push"]'
continue-on-error: true
server-unit-tests:
runs-on: ubuntu-22.04
needs: detect-noop
if: needs.detect-noop.outputs.noop != 'true'
steps:
- name: Set up Go
uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7
with:
go-version: ${{ env.GO_VERSION }}
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
with:
submodules: true
- name: Cache Go Dependencies
uses: actions/cache@v4
with:
path: .work/pkg
key: ${{ runner.os }}-pkg-${{ hashFiles('**/go.sum') }}
restore-keys: ${{ runner.os }}-pkg-
- name: Install ginkgo
run: |
sudo sed -i 's/azure\.//' /etc/apt/sources.list
sudo apt-get update
sudo apt-get install -y golang-ginkgo-dev
- name : Set up MySQL
uses: mirromutth/mysql-action@v1.1
with:
mysql database: 'kubevela'
mysql root password: 'kubevelaSQL123'
- name: Set up Postgres
uses: Harmon758/postgresql-action@v1
with:
postgresql version: '11'
postgresql db: 'kubevela'
postgresql user: 'kubevela'
postgresql password: 'Kubevela-123'
- name: Start MongoDB
uses: supercharge/mongodb-github-action@5a87bd81f88e2a8b195f8b7b656f5cda1350815a # 1.11.0
with:
mongodb-version: '5.0'
# TODO need update action version to resolve node 12 deprecated.
- name: install Kubebuilder
uses: RyanSiu1995/kubebuilder-action@e7e4de1c1eaf1d089b9a186f7526239acadf0b40
with:
version: 3.1.0
kubebuilderOnly: false
kubernetesVersion: v1.26.0
- name: Run api server unit test
run: make unit-test-server
- name: Upload coverage report
uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: ./coverage.txt
flags: apiserver-unittests
name: codecov-umbrella
server-e2e-tests:
runs-on: ubuntu-22.04
needs: [ detect-noop ]
if: needs.detect-noop.outputs.noop != 'true'
strategy:
matrix:
k8s-version: ["v1.26"]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.k8s-version }}
cancel-in-progress: true
steps:
- name: Set up Go
uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7
with:
go-version: ${{ env.GO_VERSION }}
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
with:
submodules: true
- name: Build docker image
id: docker_build
uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
with:
context: ./
build-args: |
GOPROXY=https://proxy.golang.org
file: ./Dockerfile.e2e
platforms: linux/amd64
push: false
tags: oamdev/velaux:latest
- name: Tear down K3d if exist
run: |
k3d cluster delete || true
k3d cluster delete worker || true
- name: Calculate K3d args
run: |
EGRESS_ARG=""
if [[ "${{ matrix.k8s-version }}" == v1.26 ]]; then
EGRESS_ARG="--k3s-arg --egress-selector-mode=disabled@server:0"
fi
echo "EGRESS_ARG=${EGRESS_ARG}" >> $GITHUB_ENV
- name: Setup K3d (Hub)
uses: nolar/setup-k3d-k3s@293b8e5822a20bc0d5bcdd4826f1a665e72aba96
with:
version: ${{ matrix.k8s-version }}
github-token: ${{ secrets.GITHUB_TOKEN }}
k3d-args: ${{ env.EGRESS_ARG }}
- name: Setup K3d (Worker)
uses: nolar/setup-k3d-k3s@293b8e5822a20bc0d5bcdd4826f1a665e72aba96
with:
version: ${{ matrix.k8s-version }}
github-token: ${{ secrets.GITHUB_TOKEN }}
k3d-name: worker
k3d-args: --kubeconfig-update-default=false --network=k3d-k3s-default ${{ env.EGRESS_ARG }}
- name: Kind Cluster (Worker)
run: |
internal_ip=$(docker network inspect k3d-k3s-default|jq ".[0].Containers"| jq -r '.[]| select(.Name=="k3d-worker-server-0")|.IPv4Address' | cut -d/ -f1)
k3d kubeconfig get worker > /tmp/worker.client.kubeconfig
cp /tmp/worker.client.kubeconfig /tmp/worker.kubeconfig
sed -i "s/0.0.0.0:[0-9]\+/$internal_ip:6443/" /tmp/worker.kubeconfig
- name: Setup KubeVela core environment
run: |
make e2e-setup-core
make start-addon-mock-server
make load-image
make enable-addon-no-replicas
- name: Run server e2e test
run: |
export ALIYUN_ACCESS_KEY_ID=${{ secrets.ALIYUN_ACCESS_KEY_ID }}
export ALIYUN_ACCESS_KEY_SECRET=${{ secrets.ALIYUN_ACCESS_KEY_SECRET }}
export GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
docker run --rm -v `pwd`/e2e-plugins:/plugins oamdev/velaux:latest cp -r -a /app/velaux/plugins/ /plugins/
make e2e-server-test
- name: Test addon enable
run: |
make enable-addon
- name: Upload coverage report
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: /tmp/e2e_apiserver_test.out
flags: server-e2e-tests
name: codecov-umbrella
- name: Clean e2e profile
run: rm /tmp/e2e_apiserver_test.out

@@ -1,54 +0,0 @@
name: Static Check
on:
push:
branches:
- main
- release-*
workflow_dispatch: {}
pull_request:
branches:
- main
- release-*
env:
# Common versions
GO_VERSION: '1.22.0'
jobs:
detect-noop:
runs-on: ubuntu-22.04
outputs:
noop: ${{ steps.noop.outputs.should_skip }}
steps:
- name: Detect No-op Changes
id: noop
uses: fkirc/skip-duplicate-actions@v5.3.1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
paths_ignore: '["**.md", "**.mdx", "**.png", "**.jpg"]'
do_not_skip: '["workflow_dispatch", "schedule", "push"]'
concurrent_skipping: false
check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@6ccd57f4c5d15bdc2fef309bd9fb6cc9db2ef1c6
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b
with:
node-version: '16'
cache: 'yarn'
- name: Set up Go
uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7
with:
go-version: ${{ env.GO_VERSION }}
id: go
- name: Install Yarn
run: npm install --global yarn
- name: upgrade yarn
run: yarn set version 3.6.0
- run: yarn install
- run: yarn lint
name: Check Frontend Code Style
- run: yarn test
- run: make check-diff
name: Check Server Code Style

.gitignore vendored

@@ -27,20 +27,4 @@ build
package-lock.json
yarn-error.log
.DS_Store
dist
/packages/**/compiled
.yarn/.cache/
.yarn/cache/
.yarn/unplugged
.yarn/install-state.gz
.pnp.*
tsconfig.tsbuildinfo
e2e-plugins
npm-artifacts
bin
.DS_Store

@@ -1,226 +0,0 @@
run:
timeout: 10m
skip-files:
- "zz_generated\\..+\\.go$"
- ".*_test.go$"
skip-dirs:
- "hack"
- "e2e"
output:
# colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number"
format: colored-line-number
linters-settings:
errcheck:
# report about not checking of errors in type assetions: `a := b.(MyStruct)`;
# default is false: such cases aren't reported by default.
check-type-assertions: false
# report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`;
# default is false: such cases aren't reported by default.
check-blank: false
# [deprecated] comma-separated list of pairs of the form pkg:regex
# the regex is used to ignore names within pkg. (default "fmt:.*").
# see https://github.com/kisielk/errcheck#the-deprecated-method for details
ignore: fmt:.*,io/ioutil:^Read.*
exhaustive:
# indicates that switch statements are to be considered exhaustive if a
# 'default' case is present, even if all enum members aren't listed in the
# switch
default-signifies-exhaustive: true
govet:
# report about shadowed variables
check-shadowing: false
revive:
# minimal confidence for issues, default is 0.8
min-confidence: 0.8
gofmt:
# simplify code: gofmt with `-s` option, true by default
simplify: true
goimports:
# put imports beginning with prefix after 3rd-party packages;
# it's a comma-separated list of prefixes
local-prefixes: github.com/kubevela/velaux
gocyclo:
# minimal code complexity to report, 30 by default (but we recommend 10-20)
min-complexity: 30
maligned:
# print struct with more effective memory layout or not, false by default
suggest-new: true
dupl:
# tokens count to trigger issue, 150 by default
threshold: 100
goconst:
# minimal length of string constant, 3 by default
min-len: 3
# minimal occurrences count to trigger, 3 by default
min-occurrences: 5
lll:
# tab width in spaces. Default to 1.
tab-width: 1
unused:
# treat code as a program (not a library) and report unused exported identifiers; default is false.
# XXX: if you enable this setting, unused will report a lot of false-positives in text editors:
# if it's called for subdir of a project it can't find funcs usages. All text editor integrations
# with golangci-lint call it on a directory with the changed file.
check-exported: false
unparam:
# Inspect exported functions, default is false. Set to true if no external program/library imports your code.
# XXX: if you enable this setting, unparam will report a lot of false-positives in text editors:
# if it's called for subdir of a project it can't find external interfaces. All text editor integrations
# with golangci-lint call it on a directory with the changed file.
check-exported: false
nakedret:
# make an issue if func has more lines of code than this setting and it has naked returns; default is 30
max-func-lines: 30
gocritic:
# Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint` run to see all tags and checks.
# Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags".
enabled-tags:
- performance
settings: # settings passed to gocritic
captLocal: # must be valid enabled check name
paramsOnly: true
rangeValCopy:
sizeThreshold: 32
makezero:
# Allow only slices initialized with a length of zero. Default is false.
always: false
linters:
enable:
- megacheck
- govet
- gocyclo
- gocritic
- goconst
- goimports
- gofmt # We enable this as well as goimports for its simplify mode.
- revive
- unconvert
- misspell
- nakedret
- exportloopref
disable:
- deadcode
- scopelint
- structcheck
- varcheck
- rowserrcheck
- sqlclosecheck
- errchkjson
- contextcheck
presets:
- bugs
- unused
fast: false
issues:
# Excluding configuration per-path and per-linter
exclude-rules:
# Exclude some linters from running on tests files.
- path: _test(ing)?\.go
linters:
- gocyclo
- errcheck
- dupl
- gosec
- exportloopref
- unparam
# Ease some gocritic warnings on test files.
- path: _test\.go
text: "(unnamedResult|exitAfterDefer)"
linters:
- gocritic
# These are performance optimisations rather than style issues per se.
# They warn when function arguments or range values copy a lot of memory
# rather than using a pointer.
- text: "(hugeParam|rangeValCopy):"
linters:
- gocritic
# This "TestMain should call os.Exit to set exit code" warning is not clever
# enough to notice that we call a helper method that calls os.Exit.
- text: "SA3000:"
linters:
- staticcheck
- text: "k8s.io/api/core/v1"
linters:
- goimports
# This is a "potential hardcoded credentials" warning. It's triggered by
# any variable with 'secret' in the name, and thus hits a lot of false
# positives in Kubernetes land where a Secret is an object type.
- text: "G101:"
linters:
- gosec
- gas
# This is an 'errors unhandled' warning that duplicates errcheck.
- text: "G104:"
linters:
- gosec
- gas
# The Azure AddToUserAgent method appends to the existing user agent string.
# It returns an error if you pass it an empty string, letting you know the
# user agent did not change, making it more of a warning.
- text: \.AddToUserAgent
linters:
- errcheck
- text: "don't use an underscore"
linters:
- revive
- text: "package-comments: should have a package comment"
linters:
- revive
- text: "error-strings: error strings should not be capitalized or end with punctuation or a newline"
linters:
- revive
# Independently from option `exclude` we use default exclude patterns,
# it can be disabled by this option. To list all
# excluded by default patterns execute `golangci-lint run --help`.
# Default value for this option is true.
exclude-use-default: false
# Show only new issues: if there are unstaged changes or untracked files,
# only those changes are analyzed, else only changes in HEAD~ are analyzed.
# It's a super-useful option for integration of golangci-lint into existing
# large codebase. It's not practical to fix all existing issues at the moment
# of integration: much better don't allow issues in new code.
# Default is false.
new: false
# Maximum issues count per one linter. Set to 0 to disable. Default is 50.
max-per-linter: 0
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
max-same-issues: 0

@@ -1,5 +1,5 @@
const fabric = require('@umijs/fabric');
module.exports = {
trailingComma: 'es5',
singleQuote: true,
printWidth: 120,
...fabric.prettier,
};

File diff suppressed because one or more lines are too long

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require eslint/bin/eslint.js
require(absPnpApiPath).setup();
}
}
// Defer to the real eslint/bin/eslint.js your application uses
module.exports = absRequire(`eslint/bin/eslint.js`);

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require eslint
require(absPnpApiPath).setup();
}
}
// Defer to the real eslint your application uses
module.exports = absRequire(`eslint`);

@@ -1,6 +0,0 @@
{
"name": "eslint",
"version": "8.34.0-sdk",
"main": "./lib/api.js",
"type": "commonjs"
}

@@ -1,5 +0,0 @@
# This file is automatically generated by @yarnpkg/sdks.
# Manual changes might be lost!
integrations:
- vscode

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require prettier/index.js
require(absPnpApiPath).setup();
}
}
// Defer to the real prettier/index.js your application uses
module.exports = absRequire(`prettier/index.js`);

@@ -1,6 +0,0 @@
{
"name": "prettier",
"version": "2.8.7-sdk",
"main": "./index.js",
"type": "commonjs"
}

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/bin/tsc
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/bin/tsc your application uses
module.exports = absRequire(`typescript/bin/tsc`);

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/bin/tsserver
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/bin/tsserver your application uses
module.exports = absRequire(`typescript/bin/tsserver`);

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsc.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/tsc.js your application uses
module.exports = absRequire(`typescript/lib/tsc.js`);

@@ -1,225 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
const moduleWrapper = tsserver => {
if (!process.versions.pnp) {
return tsserver;
}
const {isAbsolute} = require(`path`);
const pnpApi = require(`pnpapi`);
const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//);
const isPortal = str => str.startsWith("portal:/");
const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`);
const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
return `${locator.name}@${locator.reference}`;
}));
// VSCode sends the zip paths to TS using the "zip://" prefix, that TS
// doesn't understand. This layer makes sure to remove the protocol
// before forwarding it to TS, and to add it back on all returned paths.
function toEditorPath(str) {
// We add the `zip:` prefix to both `.zip/` paths and virtual paths
if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) {
// We also take the opportunity to turn virtual paths into physical ones;
// this makes it much easier to work with workspaces that list peer
// dependencies, since otherwise Ctrl+Click would bring us to the virtual
// file instances instead of the real ones.
//
// We only do this to modules owned by the the dependency tree roots.
// This avoids breaking the resolution when jumping inside a vendor
// with peer dep (otherwise jumping into react-dom would show resolution
// errors on react).
//
const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str;
if (resolved) {
const locator = pnpApi.findPackageLocator(resolved);
if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) {
str = resolved;
}
}
str = normalize(str);
if (str.match(/\.zip\//)) {
switch (hostInfo) {
// Absolute VSCode `Uri.fsPath`s need to start with a slash.
// VSCode only adds it automatically for supported schemes,
// so we have to do it manually for the `zip` scheme.
// The path needs to start with a caret otherwise VSCode doesn't handle the protocol
//
// Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910
//
// 2021-10-08: VSCode changed the format in 1.61.
// Before | ^zip:/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
// 2022-04-06: VSCode changed the format in 1.66.
// Before | ^/zip//c:/foo/bar.zip/package.json
// After | ^/zip/c:/foo/bar.zip/package.json
//
// 2022-05-06: VSCode changed the format in 1.68
// Before | ^/zip/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
case `vscode <1.61`: {
str = `^zip:${str}`;
} break;
case `vscode <1.66`: {
str = `^/zip/${str}`;
} break;
case `vscode <1.68`: {
str = `^/zip${str}`;
} break;
case `vscode`: {
str = `^/zip/${str}`;
} break;
// To make "go to definition" work,
// We have to resolve the actual file system path from virtual path
// and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip)
case `coc-nvim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = resolve(`zipfile:${str}`);
} break;
// Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server)
// We have to resolve the actual file system path from virtual path,
// everything else is up to neovim
case `neovim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = `zipfile://${str}`;
} break;
default: {
str = `zip:${str}`;
} break;
}
} else {
str = str.replace(/^\/?/, process.platform === `win32` ? `` : `/`);
}
}
return str;
}
function fromEditorPath(str) {
switch (hostInfo) {
case `coc-nvim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for coc-nvim is in format of /<pwd>/zipfile:/<pwd>/.yarn/...
// So in order to convert it back, we use .* to match all the thing
// before `zipfile:`
return process.platform === `win32`
? str.replace(/^.*zipfile:\//, ``)
: str.replace(/^.*zipfile:/, ``);
} break;
case `neovim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for neovim is in format of zipfile:///<pwd>/.yarn/...
return str.replace(/^zipfile:\/\//, ``);
} break;
case `vscode`:
default: {
return str.replace(/^\^?(zip:|\/zip(\/ts-nul-authority)?)\/+/, process.platform === `win32` ? `` : `/`)
} break;
}
}
// Force enable 'allowLocalPluginLoads'
// TypeScript tries to resolve plugins using a path relative to itself
// which doesn't work when using the global cache
// https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238
// VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but
// TypeScript already does local loads and if this code is running the user trusts the workspace
// https://github.com/microsoft/vscode/issues/45856
const ConfiguredProject = tsserver.server.ConfiguredProject;
const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype;
ConfiguredProject.prototype.enablePluginsWithOptions = function() {
this.projectService.allowLocalPluginLoads = true;
return originalEnablePluginsWithOptions.apply(this, arguments);
};
// And here is the point where we hijack the VSCode <-> TS communications
// by adding ourselves in the middle. We locate everything that looks
// like an absolute path of ours and normalize it.
const Session = tsserver.server.Session;
const {onMessage: originalOnMessage, send: originalSend} = Session.prototype;
let hostInfo = `unknown`;
Object.assign(Session.prototype, {
onMessage(/** @type {string | object} */ message) {
const isStringMessage = typeof message === 'string';
const parsedMessage = isStringMessage ? JSON.parse(message) : message;
if (
parsedMessage != null &&
typeof parsedMessage === `object` &&
parsedMessage.arguments &&
typeof parsedMessage.arguments.hostInfo === `string`
) {
hostInfo = parsedMessage.arguments.hostInfo;
if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) {
const [, major, minor] = (process.env.VSCODE_IPC_HOOK.match(
// The RegExp from https://semver.org/ but without the caret at the start
/(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/
) ?? []).map(Number)
if (major === 1) {
if (minor < 61) {
hostInfo += ` <1.61`;
} else if (minor < 66) {
hostInfo += ` <1.66`;
} else if (minor < 68) {
hostInfo += ` <1.68`;
}
}
}
}
const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => {
return typeof value === 'string' ? fromEditorPath(value) : value;
});
return originalOnMessage.call(
this,
isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON)
);
},
send(/** @type {any} */ msg) {
return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => {
return typeof value === `string` ? toEditorPath(value) : value;
})));
}
});
return tsserver;
};
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsserver.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/tsserver.js your application uses
module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`));
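
To make the generated wrapper above easier to follow: its whole job is to rewrite zip-archive paths in both directions between the TypeScript server and the editor. A standalone sketch of just the modern-VSCode branch, reduced to the string rewrites used above (the helper names and the sample path are illustrative; POSIX paths only — the real wrapper also handles Windows):

```typescript
// Illustrative reduction of toEditorPath/fromEditorPath for `vscode` >= 1.68.
const toEditorPath = (p: string): string =>
  `^/zip/${p.replace(/\\/g, '/').replace(/^\/?/, '/')}`;

const fromEditorPath = (p: string): string =>
  p.replace(/^\^?(zip:|\/zip(\/ts-nul-authority)?)\/+/, '/');

const real = '/home/dev/.yarn/cache/lodash.zip/node_modules/lodash/index.js';
const editor = toEditorPath(real);            // "^/zip//home/dev/.yarn/cache/..."
console.log(fromEditorPath(editor) === real); // true — the rewrite round-trips
```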

@@ -1,225 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
const moduleWrapper = tsserver => {
if (!process.versions.pnp) {
return tsserver;
}
const {isAbsolute} = require(`path`);
const pnpApi = require(`pnpapi`);
const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//);
const isPortal = str => str.startsWith("portal:/");
const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`);
const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
return `${locator.name}@${locator.reference}`;
}));
// VSCode sends the zip paths to TS using the "zip://" prefix, that TS
// doesn't understand. This layer makes sure to remove the protocol
// before forwarding it to TS, and to add it back on all returned paths.
function toEditorPath(str) {
// We add the `zip:` prefix to both `.zip/` paths and virtual paths
if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) {
// We also take the opportunity to turn virtual paths into physical ones;
// this makes it much easier to work with workspaces that list peer
// dependencies, since otherwise Ctrl+Click would bring us to the virtual
// file instances instead of the real ones.
//
// We only do this to modules owned by the the dependency tree roots.
// This avoids breaking the resolution when jumping inside a vendor
// with peer dep (otherwise jumping into react-dom would show resolution
// errors on react).
//
const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str;
if (resolved) {
const locator = pnpApi.findPackageLocator(resolved);
if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) {
str = resolved;
}
}
str = normalize(str);
if (str.match(/\.zip\//)) {
switch (hostInfo) {
// Absolute VSCode `Uri.fsPath`s need to start with a slash.
// VSCode only adds it automatically for supported schemes,
// so we have to do it manually for the `zip` scheme.
// The path needs to start with a caret otherwise VSCode doesn't handle the protocol
//
// Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910
//
// 2021-10-08: VSCode changed the format in 1.61.
// Before | ^zip:/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
// 2022-04-06: VSCode changed the format in 1.66.
// Before | ^/zip//c:/foo/bar.zip/package.json
// After | ^/zip/c:/foo/bar.zip/package.json
//
// 2022-05-06: VSCode changed the format in 1.68
// Before | ^/zip/c:/foo/bar.zip/package.json
// After | ^/zip//c:/foo/bar.zip/package.json
//
case `vscode <1.61`: {
str = `^zip:${str}`;
} break;
case `vscode <1.66`: {
str = `^/zip/${str}`;
} break;
case `vscode <1.68`: {
str = `^/zip${str}`;
} break;
case `vscode`: {
str = `^/zip/${str}`;
} break;
// To make "go to definition" work,
// We have to resolve the actual file system path from virtual path
// and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip)
case `coc-nvim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = resolve(`zipfile:${str}`);
} break;
// Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server)
// We have to resolve the actual file system path from virtual path,
// everything else is up to neovim
case `neovim`: {
str = normalize(resolved).replace(/\.zip\//, `.zip::`);
str = `zipfile://${str}`;
} break;
default: {
str = `zip:${str}`;
} break;
}
} else {
str = str.replace(/^\/?/, process.platform === `win32` ? `` : `/`);
}
}
return str;
}
function fromEditorPath(str) {
switch (hostInfo) {
case `coc-nvim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for coc-nvim is in format of /<pwd>/zipfile:/<pwd>/.yarn/...
// So in order to convert it back, we use .* to match all the thing
// before `zipfile:`
return process.platform === `win32`
? str.replace(/^.*zipfile:\//, ``)
: str.replace(/^.*zipfile:/, ``);
} break;
case `neovim`: {
str = str.replace(/\.zip::/, `.zip/`);
// The path for neovim is in format of zipfile:///<pwd>/.yarn/...
return str.replace(/^zipfile:\/\//, ``);
} break;
case `vscode`:
default: {
return str.replace(/^\^?(zip:|\/zip(\/ts-nul-authority)?)\/+/, process.platform === `win32` ? `` : `/`)
} break;
}
}
// Force enable 'allowLocalPluginLoads'
// TypeScript tries to resolve plugins using a path relative to itself
// which doesn't work when using the global cache
// https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238
// VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but
// TypeScript already does local loads and if this code is running the user trusts the workspace
// https://github.com/microsoft/vscode/issues/45856
const ConfiguredProject = tsserver.server.ConfiguredProject;
const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype;
ConfiguredProject.prototype.enablePluginsWithOptions = function() {
this.projectService.allowLocalPluginLoads = true;
return originalEnablePluginsWithOptions.apply(this, arguments);
};
// And here is the point where we hijack the VSCode <-> TS communications
// by adding ourselves in the middle. We locate everything that looks
// like an absolute path of ours and normalize it.
const Session = tsserver.server.Session;
const {onMessage: originalOnMessage, send: originalSend} = Session.prototype;
let hostInfo = `unknown`;
Object.assign(Session.prototype, {
onMessage(/** @type {string | object} */ message) {
const isStringMessage = typeof message === 'string';
const parsedMessage = isStringMessage ? JSON.parse(message) : message;
if (
parsedMessage != null &&
typeof parsedMessage === `object` &&
parsedMessage.arguments &&
typeof parsedMessage.arguments.hostInfo === `string`
) {
hostInfo = parsedMessage.arguments.hostInfo;
if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) {
const [, major, minor] = (process.env.VSCODE_IPC_HOOK.match(
// The RegExp from https://semver.org/ but without the caret at the start
/(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/
) ?? []).map(Number)
if (major === 1) {
if (minor < 61) {
hostInfo += ` <1.61`;
} else if (minor < 66) {
hostInfo += ` <1.66`;
} else if (minor < 68) {
hostInfo += ` <1.68`;
}
}
}
}
const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => {
return typeof value === 'string' ? fromEditorPath(value) : value;
});
return originalOnMessage.call(
this,
isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON)
);
},
send(/** @type {any} */ msg) {
return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => {
return typeof value === `string` ? toEditorPath(value) : value;
})));
}
});
return tsserver;
};
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsserverlibrary.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/tsserverlibrary.js your application uses
module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`));

@@ -1,20 +0,0 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.cjs";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = createRequire(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/typescript.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/typescript.js your application uses
module.exports = absRequire(`typescript/lib/typescript.js`);

@@ -1,6 +0,0 @@
{
"name": "typescript",
"version": "4.4.4-sdk",
"main": "./lib/typescript.js",
"type": "commonjs"
}

@@ -1,3 +0,0 @@
nodeLinker: pnp
yarnPath: .yarn/releases/yarn-3.6.0.cjs

@@ -2,7 +2,7 @@
## About VelaUX
The [KubeVela](https://github.com/oam-dev/kubevela) User Experience (UX) Dashboard. Designed as an extensible, application-oriented delivery platform.
The [KubeVela](https://github.com/oam-dev/kubevela) User Experience (UX) Dashboard. It provides an out-of-the-box application delivery and management platform.
To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/oam-dev/kubevela/blob/master/CODE_OF_CONDUCT.md).
@@ -33,4 +33,4 @@ When you're ready to contribute, it's time to [Create a pull request](https://gi
### Report bugs / Suggest enhancements / Answering questions etc.
VelaUX follows the KubeVela [CONTRIBUTING Guide](https://github.com/oam-dev/kubevela/blob/master/CONTRIBUTING.md)
velaux follows the KubeVela [CONTRIBUTING Guide](https://github.com/oam-dev/kubevela/blob/master/CONTRIBUTING.md)

@@ -1,47 +1,16 @@
ARG BASE_IMAGE
FROM node:16-alpine as ui-builder
ARG VERSION
FROM node:16-alpine as builder
WORKDIR /app/velaux
ADD . .
ENV VERSION=${VERSION}
RUN apk add --no-cache git make clang build-base python3
RUN yarn install && yarn build
RUN apk add --no-cache git && yarn install && yarn build
RUN rm -rf /app/velaux/build/mock
# Build the manager binary
FROM --platform=${BUILDPLATFORM:-linux/amd64} golang:1.19-alpine@sha256:2381c1e5f8350a901597d633b2e517775eeac7a6682be39225a93b22cfd0f8bb as server-builder
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-https://goproxy.cn}
WORKDIR /workspace
# Copy the Go Modules manifests
COPY go.mod go.mod
COPY go.sum go.sum
# cache deps before building and copying source so that we don't need to re-download as much
# and so that source changes don't invalidate our downloaded layer
RUN go mod download
# Copy the go source for building server
COPY cmd/server/ cmd/server/
COPY pkg/ pkg/
# Build
ARG TARGETARCH
ARG VERSION
FROM nginx:1.21
ARG GITVERSION
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} \
go build -a -ldflags "-s -w -X github.com/oam-dev/kubevela/version.VelaVersion=${VERSION:-undefined} -X github.com/oam-dev/kubevela/version.GitRevision=${GITVERSION:-undefined}" \
-o apiserver-${TARGETARCH} cmd/server/main.go
FROM ${BASE_IMAGE:-alpine@sha256:e2e16842c9b54d985bf1ef9242a313f36b856181f188de21313820e177002501}
# This is required by daemon connecting with cri
RUN apk add --no-cache ca-certificates bash expat
WORKDIR /app/velaux
ARG TARGETARCH
ENV PATH=$PATH:/app/velaux
COPY --from=server-builder /workspace/apiserver-${TARGETARCH} /app/velaux/server
COPY --from=ui-builder /app/velaux/public /app/velaux/public
CMD ["server"]
COPY --from=builder /app/velaux/build /usr/share/nginx/html
COPY web.conf /etc/nginx/nginx.conf
RUN echo "${GITVERSION}" > /tmp/version
COPY entrypoint.sh /entrypoint.sh
ENV KUBEVELA_API_URL="127.0.0.1:8000"
ENV DEX_URL="127.0.0.1:5556"
ENTRYPOINT ["/entrypoint.sh"]
CMD ["nginx", "-g", "daemon off;"]

@@ -1,49 +0,0 @@
ARG BASE_IMAGE
FROM node:16-alpine as ui-builder
ARG VERSION
WORKDIR /app/velaux
ADD . .
ENV VERSION=${VERSION}
RUN apk add --no-cache git
RUN yarn install && yarn build && yarn packages:plugins
# Build the manager binary
FROM --platform=${BUILDPLATFORM:-linux/amd64} golang:1.19-alpine@sha256:2381c1e5f8350a901597d633b2e517775eeac7a6682be39225a93b22cfd0f8bb as server-builder
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-https://goproxy.cn}
WORKDIR /workspace
# Copy the Go Modules manifests
COPY go.mod go.mod
COPY go.sum go.sum
# cache deps before building and copying source so that we don't need to re-download as much
# and so that source changes don't invalidate our downloaded layer
RUN go mod download
# Copy the go source for building server
COPY cmd/server/ cmd/server/
COPY pkg/ pkg/
# Build
ARG TARGETARCH
ARG VERSION
ARG GITVERSION
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} \
go build -a -ldflags "-s -w -X github.com/oam-dev/kubevela/version.VelaVersion=${VERSION:-undefined} -X github.com/oam-dev/kubevela/version.GitRevision=${GITVERSION:-undefined}" \
-o apiserver-${TARGETARCH} cmd/server/main.go
FROM ${BASE_IMAGE:-alpine@sha256:e2e16842c9b54d985bf1ef9242a313f36b856181f188de21313820e177002501}
# This is required by daemon connecting with cri
RUN apk add --no-cache ca-certificates bash expat
WORKDIR /app/velaux
ARG TARGETARCH
ENV PATH=$PATH:/app/velaux
COPY --from=server-builder /workspace/apiserver-${TARGETARCH} /app/velaux/server
COPY --from=ui-builder /app/velaux/public /app/velaux/public
COPY --from=ui-builder /app/velaux/plugins/app-demo/dist /app/velaux/plugins/app-demo
COPY --from=ui-builder /app/velaux/plugins/node-dashboard/dist /app/velaux/plugins/node-dashboard
CMD ["server"]

@@ -1,38 +1,9 @@
ARG BASE_IMAGE
# Build the manager binary
FROM --platform=${BUILDPLATFORM:-linux/amd64} golang:1.19-alpine@sha256:2381c1e5f8350a901597d633b2e517775eeac7a6682be39225a93b22cfd0f8bb as server-builder
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-https://goproxy.cn}
WORKDIR /workspace
# Copy the Go Modules manifests
COPY go.mod go.mod
COPY go.sum go.sum
# cache deps before building and copying source so that we don't need to re-download as much
# and so that source changes don't invalidate our downloaded layer
COPY vendor vendor
FROM nginx:1.21
# Copy the go source for building server
COPY cmd/server/ cmd/server/
COPY pkg/ pkg/
# Build
ARG TARGETARCH
ARG VERSION
ARG GITVERSION
RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} \
go build -a -ldflags "-s -w -X github.com/oam-dev/kubevela/version.VelaVersion=${VERSION:-undefined} -X github.com/oam-dev/kubevela/version.GitRevision=${GITVERSION:-undefined}" \
-o apiserver-${TARGETARCH} cmd/server/main.go
FROM ${BASE_IMAGE:-alpine@sha256:e2e16842c9b54d985bf1ef9242a313f36b856181f188de21313820e177002501}
# This is required by daemon connecting with cri
RUN apk add --no-cache ca-certificates bash expat
WORKDIR /app/velaux
ARG TARGETARCH
ENV PATH=$PATH:/app/velaux
COPY --from=server-builder /workspace/apiserver-${TARGETARCH} /app/velaux/server
COPY ./public /app/velaux/public
CMD ["server"]
COPY build /usr/share/nginx/html
COPY web.conf /etc/nginx/nginx.conf
COPY entrypoint.sh /entrypoint.sh
ENV KUBEVELA_API_URL="127.0.0.1:8000"
ENV DEX_URL="127.0.0.1:5556"
ENTRYPOINT ["/entrypoint.sh"]
CMD ["nginx", "-g", "daemon off;"]

Makefile

@@ -1,131 +0,0 @@
include makefiles/const.mk
include makefiles/build.mk
all: docker-build
.PHONY: golangci
golangci:
ifeq ($(shell $(GLOBAL_GOLANGCILINT) version --format short), $(GOLANGCILINT_VERSION))
@$(OK) golangci-lint is already installed
GOLANGCILINT=$(GLOBAL_GOLANGCILINT)
else ifeq ($(shell $(GOBIN_GOLANGCILINT) version --format short), $(GOLANGCILINT_VERSION))
@$(OK) golangci-lint is already installed
GOLANGCILINT=$(GOBIN_GOLANGCILINT)
else
@{ \
set -e ;\
echo 'installing golangci-lint-$(GOLANGCILINT_VERSION)' ;\
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v$(GOLANGCILINT_VERSION) ;\
echo 'Successfully installed' ;\
}
GOLANGCILINT=$(GOBIN)/golangci-lint
endif
.PHONY: staticchecktool
staticchecktool:
ifeq (, $(shell which staticcheck))
@{ \
set -e ;\
echo 'installing honnef.co/go/tools/cmd/staticcheck ' ;\
go install honnef.co/go/tools/cmd/staticcheck@v0.4.7 ;\
}
STATICCHECK=$(GOBIN)/staticcheck
else
STATICCHECK=$(shell which staticcheck)
endif
.PHONY: goimports
goimports:
ifeq (, $(shell which goimports))
@{ \
set -e ;\
go install golang.org/x/tools/cmd/goimports@6546d82b229aa5bd9ebcc38b09587462e34b48b6 ;\
}
GOIMPORTS=$(GOBIN)/goimports
else
GOIMPORTS=$(shell which goimports)
endif
.PHONY: e2e-setup-core
e2e-setup-core: install-vela install-core install-addon
.PHONY: install-vela
install-vela:
curl -fsSl https://kubevela.io/script/install.sh | bash -s v1.9.0-alpha.3
install-core:
vela install -v v1.9.0-alpha.2 -y
install-addon:
vela addon enable fluxcd
vela addon enable vela-workflow --override-definitions
kubectl wait --for=condition=Ready pod -l app=source-controller -n flux-system --timeout=600s
kubectl wait --for=condition=Ready pod -l app=helm-controller -n flux-system --timeout=600s
kubectl wait --for=condition=Ready pod -l app.kubernetes.io/name=vela-workflow -n vela-system --timeout=600s
start-addon-mock-server:
go run ./e2e-test/addon &
load-image:
k3d image import oamdev/velaux:latest || { echo >&2 "kind not installed or error loading image: $(VELA_CORE_TEST_IMAGE)"; exit 1; }
enable-addon-no-replicas:
vela addon enable ./addon replicas=0
enable-addon:
vela addon enable ./addon
.PHONY: e2e-server-test
e2e-server-test:
go test -v -coverpkg=./... -coverprofile=/tmp/e2e_apiserver_test.out ./e2e-test
@$(OK) tests pass
unit-test-server:
go test -gcflags=all=-l -coverprofile=coverage.txt $(shell go list ./pkg/... ./cmd/...)
setup-test-server:
curl -L -o kubebuilder https://go.kubebuilder.io/dl/latest/$(shell go env GOOS)/$(shell go env GOARCH)
chmod +x kubebuilder
sudo mv kubebuilder /usr/local/bin/
go install sigs.k8s.io/controller-runtime/tools/setup-envtest@latest
${eval OUTPUT = $(shell ${GOBIN}/setup-envtest --bin-dir /tmp use)}
${eval BIN_PATH=$(lastword $(subst Path:, ,${OUTPUT}))}
sudo mkdir -p /usr/local/kubebuilder/bin
sudo mv ${BIN_PATH}/* /usr/local/kubebuilder/bin
build-swagger:
go run ./cmd/server/main.go build-swagger ./docs/apidoc/swagger.json
lint: golangci
@$(INFO) lint
@$(GOLANGCILINT) run --timeout 5m
vet:
@$(INFO) go vet
@go vet $(shell go list ./...)
fmt: goimports
go fmt ./...
$(GOIMPORTS) -local github.com/kubevela/velaux -w $$(go list -f {{.Dir}} ./...)
staticcheck: staticchecktool
@$(INFO) staticcheck
@$(STATICCHECK) $(shell go list ./...)
mod:
go mod tidy
reviewable: mod fmt vet staticcheck lint
# Execute auto-gen code commands and ensure branch is clean.
check-diff: reviewable
git --no-pager diff
git diff --quiet || ($(ERR) please run 'make reviewable' to include all changes && false)
@$(OK) branch is clean
run-server:
go run ./cmd/server/main.go
build-ui:
@$(INFO) Building UI
yarn build
build-test-image: build-ui
@$(INFO) Building image
docker build -t velaux:latest -f Dockerfile.local .

OWNERS

@@ -2,7 +2,4 @@ owners:
- barnettZQG
approvers:
emeritus-members:
- wangbow
- hanxie-crypto
- wangbow

View File

@ -1,73 +1,23 @@
![alt](docs/images/KubeVela-03.png)
[![Go Report Card](https://goreportcard.com/badge/github.com/kubevela/velaux)](https://goreportcard.com/report/github.com/kubevela/velaux)
![Docker Pulls](https://img.shields.io/docker/pulls/oamdev/velaux)
# VelaUX
## Overview
[VelaUX](https://github.com/kubevela/velaux) is a web portal for KubeVela end users, and also functions as an opinionated application delivery platform. Additionally, it operates as a highly adaptable plugin framework, empowering developers to create bespoke plugins and smoothly integrate them into the KubeVela platform. This approach delivers unparalleled flexibility and customization options for scaling up the platform's capabilities.
The [KubeVela](https://github.com/oam-dev/kubevela) User Experience (UX) Dashboard. Designed as an extensible, application-oriented delivery control panel.
### Highlights
*Customizable User Interface*: With VelaUX, enterprises can tailor the user interface to their specific needs for managing applications and infrastructure. This feature leads to a more intuitive and efficient user experience, resulting in increased productivity and better resource utilization.
*Easy Integration*: VelaUX is designed for seamless integration with the KubeVela platform, simplifying the deployment and management of cloud-native atomic capabilities within the platform. This makes it easier for enterprises to build platforms that meet their needs for continuous application delivery, observability, security, and other requirements based on these cloud-native atomic capabilities.
*Out-of-the-Box Platform*: VelaUX provides a comprehensive set of features that empower enterprises to deploy and monitor their applications effectively. These features include multi-cluster and multi-environment support, pipeline management, observability, and more. Additionally, VelaUX streamlines the complexities of Kubernetes, making it simpler for users to manage their deployments.
## Quick Start
### Users
Please refer to this guide to install: [https://kubevela.net/docs/install](https://kubevela.net/docs/install)
### Developers
#### Build the frontend
Make sure you have installed [yarn 2.0](https://yarnpkg.com/getting-started/install); it is required.
Install frontend dependencies and build the frontend.
## Quickstart
```shell
yarn install
yarn build
yarn start
```
#### Start the server
Configure the API server address locally.
1. Install Go 1.19+.
2. Prepare a KubeVela core environment.
```shell
## Linux or Mac
curl -fsSl https://static.kubevela.net/script/install-velad.sh | bash
## Windows
powershell -Command "iwr -useb https://static.kubevela.net/script/install-velad.ps1 | iex"
velad install
```
3. Initialize the dependencies.
```shell
vela addon enable ./addon replicas=0
```
4. Start the server locally
```shell
# Install all dependencies
go mod tidy
# Setting the kube config
export KUBECONFIG=$(velad kubeconfig --host)
# Start the server
make run-server
```
Then, you can open http://127.0.0.1:8000. For more information, refer to [contributing](./docs/contributing/velaux.md). To point the local frontend at this API server, write the address into a `.env` file:
```shell
echo "BASE_DOMAIN='http://127.0.0.1:8000'" > .env
```
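Putting the frontend and the server together, a full local development loop could look like the following (a sketch based on the commands above; run the two processes in separate terminals, and note that `yarn start` is taken from the quickstart section and may differ in the current workspace layout):

```shell
# Terminal 1: start the API server against the VelaD cluster
export KUBECONFIG=$(velad kubeconfig --host)
make run-server

# Terminal 2: point the frontend at the local API server and start it
echo "BASE_DOMAIN='http://127.0.0.1:8000'" > .env
yarn install && yarn start
```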
## Community

View File

@ -1,8 +0,0 @@
info: """
To open the dashboard directly by port-forward:
vela port-forward -n vela-system addon-velaux 8000:8000
Please refer to https://kubevela.io/docs/reference/addons/velaux for more VelaUX addon installation and visiting method.
"""
notes: (info)

View File

@ -1,77 +0,0 @@
import (
"encoding/base64"
"encoding/json"
"strconv"
"vela/config"
)
metadata: {
name: "image-registry"
alias: "Image Registry"
scope: "project"
description: "Config information to authenticate image registry"
sensitive: false
}
template: {
output: {
apiVersion: "v1"
kind: "Secret"
metadata: {
name: context.name
namespace: context.namespace
labels: {
"config.oam.dev/catalog": "velacore-config"
"config.oam.dev/type": "image-registry"
}
}
if parameter.auth != _|_ {
type: "kubernetes.io/dockerconfigjson"
}
if parameter.auth == _|_ {
type: "Opaque"
}
stringData: {
if parameter.auth != _|_ && parameter.auth.username != _|_ {
".dockerconfigjson": json.Marshal({
"auths": (parameter.registry): {
"username": parameter.auth.username
"password": parameter.auth.password
if parameter.auth.email != _|_ {
"email": parameter.auth.email
}
"auth": base64.Encode(null, (parameter.auth.username + ":" + parameter.auth.password))
}
})
}
if parameter.insecure != _|_ {
"insecure-skip-verify": strconv.FormatBool(parameter.insecure)
}
if parameter.useHTTP != _|_ {
"protocol-use-http": strconv.FormatBool(parameter.useHTTP)
}
}
}
validation: config.#ImageRegistry & {
$params: parameter
}
parameter: {
// +usage=Image registry FQDN, such as: index.docker.io
registry: *"index.docker.io" | string
// +usage=Authenticate the image registry
auth?: {
// +usage=Private Image registry username
username: string
// +usage=Private Image registry password
password: string
// +usage=Private Image registry email
email?: string
}
// +usage=For a registry server that uses a self-signed certificate
insecure?: bool
// +usage=For a registry server that uses the HTTP protocol
useHTTP?: bool
}
}
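For reference, a config based on this template could be created with the KubeVela CLI roughly as follows (a sketch; the `--template` flag form and the placeholder registry credentials are assumptions for illustration):

```shell
vela config create my-registry --template image-registry \
  registry=index.docker.io \
  auth.username=<username> \
  auth.password=<password>
```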

View File

@ -1,56 +0,0 @@
metadata: {
name: "nacos-config"
alias: "Nacos Configuration"
description: "Write the configuration to the nacos"
sensitive: false
scope: "system"
}
template: {
nacos: {
// The endpoint cannot reference the parameter.
endpoint: {
// Users must create a config based on the nacos-server template first.
name: "nacos"
}
format: parameter.contentType
// could reference the parameter
metadata: {
dataId: parameter.dataId
group: parameter.group
if parameter.appName != _|_ {
appName: parameter.appName
}
if parameter.namespaceId != _|_ {
namespaceId: parameter.namespaceId
}
if parameter.tenant != _|_ {
tenant: parameter.tenant
}
if parameter.tag != _|_ {
tag: parameter.tag
}
}
content: parameter.content
}
parameter: {
// +usage=Configuration ID
dataId: string
// +usage=Configuration group
group: *"DEFAULT_GROUP" | string
// +usage=The configuration content.
content: {
...
}
contentType: *"json" | "yaml" | "properties" | "toml"
// +usage=The app name of the configuration
appName?: string
// +usage=The namespaceId of the configuration
namespaceId?: string
// +usage=The tenant, corresponding to the namespace ID field of Nacos
tenant?: string
// +usage=The tag of the configuration
tag?: string
}
}

View File

@ -1,38 +0,0 @@
metadata: {
name: "nacos-server"
alias: "Nacos Server"
description: "Config the Nacos server connectors"
sensitive: false
scope: "system"
}
template: {
parameter: {
// +usage=Directly configure the Nacos server address
servers?: [...{
// +usage=the nacos server address
ipAddr: string
// +usage=nacos server port
port: *8849 | int
// +usage=Nacos server gRPC port; defaults to the server port + 1000, so this is not required
grpcPort?: int
}]
// +usage=Discover the Nacos servers by the client.
client?: {
// +usage=the endpoint for getting Nacos server addresses
endpoint: string
// +usage=the AccessKey for kms
accessKey?: string
// +usage=the SecretKey for kms
secretKey?: string
// +usage=the regionId for kms
regionId?: string
// +usage=the username for nacos auth
username?: string
// +usage=the password for nacos auth
password?: string
// +usage=Whether to enable KMS; default is false. https://help.aliyun.com/product/28933.html
openKMS?: bool
}
}
}

View File

@ -1,16 +0,0 @@
name: velaux
version: v1.8.0-rc.3
description: KubeVela User Experience (UX). An extensible, application-oriented delivery and management Platform.
icon: https://static.kubevela.net/images/logos/KubeVela%20-03.png
url: https://kubevela.io
tags:
- Official
- Dashboard
deployTo:
controlPlane: true
runtimeCluster: false
system:
vela: ">=v1.6.0"

View File

@ -1,28 +0,0 @@
parameter: {
// +usage=Specify the image hub of velaux, eg. "acr.kubevela.net"
repo?: string
// +usage=Specify the database type, current support KubeAPI(default) and MongoDB.
dbType: *"kubeapi" | "mongodb"
// +usage=Specify the database name, for the kubeapi db type, it represents namespace.
database?: string
// +usage=Specify the MongoDB URL. It is only enabled when the DB type is MongoDB.
dbURL?: string
// +usage=Specify the domain, if set, ingress will be created if the gateway driver is nginx.
domain?: string
// +usage=Specify the name of the certificate secret; if set, HTTPS is enabled.
secretName?: string
// +usage=Specify the gateway type.
gatewayDriver: *"nginx" | "traefik"
// +usage=Specify the serviceAccountName for apiserver
serviceAccountName: *"kubevela-ux" | string
// +usage=Specify the service type.
serviceType: *"ClusterIP" | "NodePort" | "LoadBalancer"
// +usage=Specify the names of imagePullSecret for private image registry, eg. "{a,b,c}"
imagePullSecrets?: [...string]
// +usage=Specify the replicas.
replicas: *1 | int
// +usage=Specify nodeport. This will be ignored if serviceType is not NodePort.
nodePort: *30000 | int
// +usage=Enabling impersonation means the login user is impersonated when requesting the KubeAPI.
enableImpersonation: true | *false
}

View File

@ -1,20 +0,0 @@
# VelaUX
VelaUX provides end-to-end application delivery and management experience, including API services and UI dashboard.
## Installation
Note the following precautions:
1. If you use it in production, please configure a MongoDB database.
2. Configure the Domain parameter to generate Ingress resources; otherwise, only Service resources will be generated. See the example below.
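For example, enabling the addon with a MongoDB datastore and a custom domain could look like this (a sketch; the MongoDB URL and the domain are placeholders):

```shell
vela addon enable velaux dbType=mongodb \
  dbURL="mongodb://<user>:<password>@<mongodb-host>:27017" \
  domain=velaux.example.com
```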
## Feedback
If you have any questions or feedback during use, please contact us through the following methods.
- Create Issue: [https://github.com/oam-dev/velaux/issues](https://github.com/oam-dev/velaux/issues)
- Slack: [CNCF Slack](https://slack.cncf.io/) #kubevela channel (_English_)
- Join DingTalk Group: 23310022
- Join WeChat Group: add the broker on WeChat to be invited into the user group.
<img src="https://static.kubevela.net/images/barnett-wechat.jpg" width="200" />

View File

@ -1,17 +0,0 @@
# Introduction to VelaUX
VelaUX provides an end-to-end application delivery and management experience, including API services and a UI dashboard.
## Installation Notes
1. For production use, configuring a MongoDB database is recommended.
2. Configure the Domain parameter to generate Ingress resources; otherwise, only Service resources will be generated.
## Feedback
If you have any questions or feedback during use, please contact us through the following channels.
- Create an Issue: [https://github.com/oam-dev/velaux/issues](https://github.com/oam-dev/velaux/issues)
- Join the DingTalk Group: 23310022
- Join the WeChat Group: add the administrator on WeChat and mention that the feedback is about VelaUX
<img src="https://static.kubevela.net/images/barnett-wechat.jpg" width="200" />

View File

@ -1,50 +0,0 @@
package main
additionalPrivileges: {
type: "k8s-objects"
name: "velaux-additional-privileges"
properties: objects: [
{
apiVersion: "rbac.authorization.k8s.io/v1"
kind: "ClusterRoleBinding"
metadata: name: "clustergateway:kubevela:ux"
roleRef: {
apiGroup: "rbac.authorization.k8s.io"
kind: "ClusterRole"
name: "cluster-admin"
}
subjects: [{
kind: "Group"
name: "kubevela:ux"
apiGroup: "rbac.authorization.k8s.io"
}, {
kind: "ServiceAccount"
name: parameter["serviceAccountName"]
namespace: "vela-system"
}]
},
{
apiVersion: "v1"
kind: "ServiceAccount"
metadata: {
name: parameter["serviceAccountName"]
namespace: "vela-system"
}
secrets: [
{
name: parameter["serviceAccountName"] + "-token"
},
]
},
{
apiVersion: "v1"
kind: "Secret"
metadata: {
name: parameter["serviceAccountName"] + "-token"
namespace: "vela-system"
annotations: "kubernetes.io/service-account.name": parameter["serviceAccountName"]
}
type: "kubernetes.io/service-account-token"
},
]
}

View File

@ -1,94 +0,0 @@
package main
_version: context.metadata.version
database: *[ if parameter["database"] != _|_ {
"--datastore-database=" + parameter["database"]
}] | []
dbURL: *[ if parameter["dbURL"] != _|_ {
"--datastore-url=" + parameter["dbURL"]
}] | []
enableImpersonation: *[ if parameter["enableImpersonation"] {
"--feature-gates=EnableImpersonation=true"
}] | []
_nginxTrait: *[
if parameter["domain"] != _|_ && parameter["gatewayDriver"] == "nginx" {
{
type: "gateway"
properties: {
domain: parameter["domain"]
http: {
"/": 8000
}
class: "nginx"
}
}
},
] | []
_traefikTrait: *[
if parameter["domain"] != _|_ && parameter["gatewayDriver"] == "traefik" {
{
type: "http-route"
properties: {
domains: [ parameter["domain"]]
rules: [{port: 8000}]
}
}
},
] | []
_httpsTrait: *[ if parameter["secretName"] != _|_ && parameter["domain"] != _|_ && parameter["gatewayDriver"] == "traefik" {
type: "https-route"
properties: {
domains: [ parameter["domain"]]
rules: [{port: 8000}]
secrets: [{
name: parameter["secretName"]
}]
}}] | []
server: {
name: "velaux-server"
type: "webservice"
properties: {
if parameter["repo"] == _|_ {
image: "oamdev/velaux:" + _version
}
if parameter["repo"] != _|_ {
image: parameter["repo"] + "/" + "oamdev/velaux:" + _version
}
if parameter["imagePullSecrets"] != _|_ {
imagePullSecrets: parameter["imagePullSecrets"]
}
if parameter["serviceType"] != _|_ {
exposeType: parameter["serviceType"]
}
cmd: ["server", "--datastore-type=" + parameter["dbType"], "--feature-gates=EnableCacheJSFile=true"] + database + dbURL + enableImpersonation
ports: [
{
port: 8000
protocol: "TCP"
expose: true
if parameter["serviceType"] == "NodePort" {
nodePort: parameter["nodePort"]
}
},
]
}
dependsOn: ["velaux-additional-privileges"]
traits: [
{
type: "service-account"
properties: name: parameter["serviceAccountName"]
},
{type: "scaler", properties: replicas: parameter["replicas"]},
] + _nginxTrait + _traefikTrait + _httpsTrait
}

View File

@ -1,37 +0,0 @@
- jsonKey: dbType
label: DBType
validate:
required: true
defaultValue: kubeapi
options:
- label: MongoDB
value: mongodb
- label: KubeAPI
value: kubeapi
sort: 1
- jsonKey: dbURL
label: DatabaseURL
sort: 3
conditions:
- jsonKey: dbType
op: "=="
value: "mongodb"
validate:
required: true
- jsonKey: database
sort: 7
validate:
defaultValue: kubevela
required: true
- jsonKey: serviceAccountName
disable: true
- jsonKey: serviceType
sort: 8
- jsonKey: domain
sort: 9
- jsonKey: gatewayDriver
sort: 10
- jsonKey: repo
sort: 11
- jsonKey: imagePullSecrets
sort: 13

View File

@ -1,2 +0,0 @@
- jsonKey: objects
uiType: K8sObjectsCode

View File

@ -1,223 +0,0 @@
- uiType: ImageInput
jsonKey: image
label: Image
sort: 1
- jsonKey: imagePullSecret
sort: 2
disable: true
- jsonKey: restart
sort: 3
uiType: Select
validate:
required: true
defaultValue: Never
options:
- label: Never
value: Never
- label: OnFailure
value: OnFailure
- jsonKey: count
sort: 4
uiType: Number
validate:
required: true
defaultValue: 1
min: 0
max: 128
- jsonKey: memory
uiType: MemoryNumber
sort: 5
label: Memory
description: Specifies the memory resource required for the container. If set to 0, there is no limit.
validate:
required: true
defaultValue: "1024Mi"
style:
colSpan: 12
- jsonKey: cpu
uiType: CPUNumber
sort: 7
label: CPU
description: Specifies the CPU resource required for the container. If set to 0, there is no limit.
validate:
required: true
defaultValue: "0.5"
style:
colSpan: 12
- jsonKey: cmd
label: CMD
sort: 9
- jsonKey: env
sort: 10
label: ENV
subParameterGroupOption:
- label: Add By Value
keys:
- name
- value
- label: Add By Secret
keys:
- name
- valueFrom
subParameters:
- jsonKey: valueFrom
label: Secret Selector
uiType: Ignore
subParameters:
- jsonKey: configMapKeyRef
disable: true
- jsonKey: secretKeyRef
uiType: Ignore
subParameters:
- jsonKey: name
label: Secret Name
uiType: SecretSelect
sort: 1
- jsonKey: key
label: Secret Key
uiType: SecretKeySelect
sort: 3
uiType: Structs
- jsonKey: readinessProbe
uiType: Group
label: ReadinessProbe
subParameters:
- jsonKey: hostAliases
disable: true
- jsonKey: timeoutSeconds
sort: 1
style:
colSpan: 12
- jsonKey: failureThreshold
sort: 4
style:
colSpan: 12
- jsonKey: initialDelaySeconds
sort: 7
style:
colSpan: 12
validate:
defaultValue: 5
required: true
- jsonKey: periodSeconds
sort: 9
style:
colSpan: 12
- jsonKey: successThreshold
sort: 11
style:
colSpan: 12
- jsonKey: exec
sort: 14
- jsonKey: httpGet
sort: 19
subParameters:
- jsonKey: port
sort: 1
validate:
required: true
min: 1
style:
colSpan: 12
- jsonKey: path
sort: 3
validate:
required: true
pattern: ^/(.*)$
style:
colSpan: 12
- jsonKey: httpHeaders
sort: 5
- jsonKey: tcpSocket
sort: 19
subParameters:
- jsonKey: port
validate:
required: true
min: 1
sort: 13
- jsonKey: livenessProbe
uiType: Group
label: LivenessProbe
subParameters:
- jsonKey: hostAliases
disable: true
- jsonKey: timeoutSeconds
sort: 1
style:
colSpan: 12
- jsonKey: failureThreshold
sort: 4
style:
colSpan: 12
- jsonKey: initialDelaySeconds
sort: 7
style:
colSpan: 12
validate:
defaultValue: 5
required: true
- jsonKey: periodSeconds
sort: 9
style:
colSpan: 12
- jsonKey: successThreshold
sort: 11
style:
colSpan: 12
- jsonKey: exec
sort: 14
- jsonKey: httpGet
sort: 19
subParameters:
- jsonKey: port
sort: 1
validate:
required: true
min: 1
style:
colSpan: 12
- jsonKey: path
sort: 3
validate:
required: true
pattern: ^/(.*)$
style:
colSpan: 12
- jsonKey: httpHeaders
sort: 5
- jsonKey: tcpSocket
sort: 19
subParameters:
- jsonKey: port
validate:
required: true
min: 1
sort: 15
- jsonKey: annotations
sort: 19
- jsonKey: labels
sort: 21
- description: Specify image pull policy for your service
jsonKey: imagePullPolicy
label: Image Pull Policy
uiType: Select
sort: 24
validate:
defaultValue: IfNotPresent
options:
- label: IfNotPresent
value: IfNotPresent
- label: Always
value: Always
- label: Never
value: Never
- jsonKey: volumes
disable: true

View File

@ -1,301 +0,0 @@
- uiType: ImageInput
jsonKey: image
label: Container Image
sort: 1
- jsonKey: imagePullSecret
sort: 2
disable: true
- jsonKey: memory
uiType: MemoryNumber
sort: 4
label: Memory
description: Specifies the memory resource required for the container. If set to 0, there is no limit.
validate:
required: true
defaultValue: "1024Mi"
style:
colSpan: 12
- jsonKey: cpu
uiType: CPUNumber
sort: 5
label: CPU
description: Specifies the CPU resource required for the container. If set to 0, there is no limit.
validate:
required: true
defaultValue: "0.5"
style:
colSpan: 12
- jsonKey: exposeType
sort: 6
validate:
- jsonKey: ports
label: Service Ports
sort: 7
subParameters:
- jsonKey: port
sort: 1
validate:
required: true
min: 1
- jsonKey: protocol
sort: 3
- jsonKey: name
sort: 4
disable: true
- jsonKey: expose
sort: 5
validate:
required: true
defaultValue: [{"port": 80, "protocol": "TCP", "expose": true}]
- jsonKey: cmd
label: CMD
sort: 9
- jsonKey: env
sort: 10
label: ENV
subParameterGroupOption:
- label: Add By Value
keys:
- name
- value
- label: Add By Secret
keys:
- name
- valueFrom
subParameters:
- jsonKey: valueFrom
label: Secret Selector
uiType: Ignore
subParameters:
- jsonKey: configMapKeyRef
disable: true
- jsonKey: secretKeyRef
uiType: Ignore
subParameters:
- jsonKey: name
label: Secret Name
uiType: SecretSelect
sort: 1
- jsonKey: key
label: Secret Key
uiType: SecretKeySelect
sort: 3
uiType: Structs
- jsonKey: volumeMounts
label: Persistent Storage
description: "Set the path and type that the service needs to persist."
uiType: Group
subParameters:
- jsonKey: configMap
disable: true
- jsonKey: secret
disable: true
- jsonKey: pvc
label: Storage By PVC
sort: 1
subParameters:
- jsonKey: name
sort: 1
validate:
required: true
pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])$
- jsonKey: claimName
sort: 3
- jsonKey: mountPath
sort: 5
validate:
required: true
pattern: ^/(.*)$
- jsonKey: hostPath
label: Storage By HostPath
sort: 3
subParameters:
- jsonKey: name
sort: 1
validate:
required: true
pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])$
- jsonKey: path
label: Host Path
sort: 3
validate:
required: true
pattern: ^/(.*)$
- jsonKey: mountPath
sort: 5
validate:
required: true
pattern: ^/(.*)$
- jsonKey: emptyDir
label: Temporary Storage
sort: 5
subParameters:
- jsonKey: name
sort: 1
validate:
required: true
pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])$
- jsonKey: medium
sort: 3
validate:
required: false
options:
- label: Memory
value: memory
- label: Dir
value: ""
- jsonKey: mountPath
sort: 5
validate:
required: true
pattern: ^/(.*)$
sort: 12
- jsonKey: readinessProbe
uiType: Group
label: ReadinessProbe
subParameters:
- jsonKey: hostAliases
disable: true
- jsonKey: timeoutSeconds
sort: 1
style:
colSpan: 12
- jsonKey: failureThreshold
sort: 4
style:
colSpan: 12
- jsonKey: initialDelaySeconds
sort: 7
style:
colSpan: 12
validate:
defaultValue: 5
required: true
- jsonKey: periodSeconds
sort: 9
style:
colSpan: 12
- jsonKey: successThreshold
sort: 11
style:
colSpan: 12
- jsonKey: exec
sort: 14
- jsonKey: httpGet
sort: 19
subParameters:
- jsonKey: port
sort: 1
validate:
required: true
min: 1
style:
colSpan: 12
- jsonKey: path
sort: 3
validate:
required: true
pattern: ^/(.*)$
style:
colSpan: 12
- jsonKey: httpHeaders
sort: 5
- jsonKey: tcpSocket
sort: 19
subParameters:
- jsonKey: port
validate:
required: true
min: 1
sort: 13
- jsonKey: livenessProbe
uiType: Group
label: LivenessProbe
subParameters:
- jsonKey: hostAliases
disable: true
- jsonKey: timeoutSeconds
sort: 1
style:
colSpan: 12
- jsonKey: failureThreshold
sort: 4
style:
colSpan: 12
- jsonKey: initialDelaySeconds
sort: 7
style:
colSpan: 12
validate:
defaultValue: 5
required: true
- jsonKey: periodSeconds
sort: 9
style:
colSpan: 12
- jsonKey: successThreshold
sort: 11
style:
colSpan: 12
- jsonKey: exec
sort: 14
- jsonKey: httpGet
sort: 19
subParameters:
- jsonKey: port
sort: 1
validate:
required: true
min: 1
style:
colSpan: 12
- jsonKey: path
sort: 3
validate:
required: true
pattern: ^/(.*)$
style:
colSpan: 12
- jsonKey: httpHeaders
sort: 5
- jsonKey: tcpSocket
sort: 19
subParameters:
- jsonKey: port
validate:
required: true
min: 1
sort: 15
- jsonKey: annotations
sort: 19
- jsonKey: labels
sort: 21
- description: Specify image pull policy for your service
jsonKey: imagePullPolicy
label: Image Pull Policy
uiType: Select
sort: 24
validate:
defaultValue: IfNotPresent
options:
- label: IfNotPresent
value: IfNotPresent
- label: Always
value: Always
- label: Never
value: Never
- jsonKey: addRevisionLabel
disable: true
- jsonKey: port
disable: true
- jsonKey: volumes
disable: true

View File

@ -1,24 +0,0 @@
- jsonKey: registry
sort: 1
- jsonKey: insecure
sort: 3
style:
colSpan: 12
- jsonKey: useHTTP
sort: 5
style:
colSpan: 12
- jsonKey: auth
subParameters:
- jsonKey: username
sort: 1
style:
colSpan: 12
- jsonKey: password
sort: 3
style:
colSpan: 12
uiType: Password
- jsonKey: email
sort: 8

View File

@ -1,14 +0,0 @@
- jsonKey: dataId
sort: 1
style:
colSpan: 8
- jsonKey: group
sort: 3
style:
colSpan: 8
- jsonKey: contentType
sort: 5
style:
colSpan: 8
- jsonKey: content
sort: 7

View File

@ -1,43 +0,0 @@
- sort: 100
jsonKey: servers
subParameters:
- sort: 100
label: IPAddr
jsonKey: ipAddr
- sort: 101
label: Port
jsonKey: port
- sort: 102
label: GRPCPort
jsonKey: grpcPort
- sort: 101
jsonKey: client
subParameters:
- sort: 100
jsonKey: endpoint
- sort: 101
jsonKey: accessKey
uiType: Password
- sort: 102
jsonKey: secretKey
uiType: Password
- sort: 104
style:
colSpan: 12
jsonKey: regionId
- sort: 109
style:
colSpan: 12
jsonKey: openKMS
uiType: Switch
- sort: 110
jsonKey: username
uiType: Input
style:
colSpan: 12
- sort: 113
jsonKey: password
uiType: Password
style:
colSpan: 12

View File

@ -1,6 +0,0 @@
- sort: 100
jsonKey: selector
uiType: ComponentSelect
- sort: 101
jsonKey: components
uiType: ComponentPatches

View File

@ -1,4 +0,0 @@
- jsonKey: replicas
validate:
required: true
min: 0

View File

@ -1,86 +0,0 @@
- jsonKey: pvc
sort: 1
label: PersistentVolumeClaim
subParameters:
- jsonKey: name
sort: 1
validate:
required: true
pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])$
maxLength: 32
- jsonKey: mountPath
sort: 3
validate:
required: true
pattern: ^/(.*)$
- jsonKey: resources
sort: 7
validate:
required: true
defaultValue: {requests: {storage: "8Gi"}, limits: {storage: "8Gi"}}
subParameters:
- jsonKey: requests
uiType: Ignore
style:
colSpan: 12
subParameters:
- jsonKey: storage
uiType: DiskNumber
label: Request Storage
validate:
required: true
immutable: true
sort: 1
- jsonKey: limits
sort: 3
uiType: Ignore
style:
colSpan: 12
subParameters:
- jsonKey: storage
uiType: DiskNumber
label: Limit Storage
validate:
required: true
immutable: true
- jsonKey: storageClassName
sort: 9
description: If not specified, the cluster default StorageClass is used.
- jsonKey: accessModes
sort: 11
validate:
required: false
defaultValue: ["ReadWriteOnce"]
- jsonKey: volumeMode
sort: 12
description: You can set the value of volumeMode to Block to use a volume as a raw block device.
uiType: Select
validate:
defaultValue: Filesystem
options:
- label: Filesystem
value: Filesystem
- label: Block
value: Block
- jsonKey: volumeName
sort: 14
description: The VolumeName is the binding reference to the PersistentVolume backing this claim.
- jsonKey: selector
sort: 17
- jsonKey: dataSource
sort: 19
description: It will create a new volume based on the contents of the specified data source.
- jsonKey: dataSourceRef
sort: 20
disable: true
- jsonKey: mountOnly
disable: true
- jsonKey: secret
sort: 3
disable: true
- jsonKey: configMap
sort: 5
disable: true
- jsonKey: emptyDir
disable: true

View File

@ -1,14 +0,0 @@
- sort: 10
label: Policies
jsonKey: policies
uiType: PolicySelect
validate:
required: true
- sort: 20
jsonKey: parallelism
style:
colSpan: 12
- sort: 30
jsonKey: auto
style:
colSpan: 12

View File

@ -1,8 +0,0 @@
- jsonKey: parallel
validate:
defaultValue: false
sort: 5
- jsonKey: policy
sort: 1
- jsonKey: env
sort: 3

View File

@ -1,96 +0,0 @@
- jsonKey: dingding
sort: 1
description: Support pushing messages to a DingTalk group.
subParameters:
- jsonKey: url
uiType: Ignore
sort: 1
subParameters:
- jsonKey: value
label: Webhook
uiType: Input
validate:
required: true
- jsonKey: message
description: Specify the message that you want to send
sort: 3
uiType: Ignore
subParameters:
- jsonKey: msgtype
disable: true
- jsonKey: link
disable: true
- jsonKey: markdown
disable: true
- jsonKey: at
disable: true
- jsonKey: actionCard
disable: true
- jsonKey: feedCard
disable: true
- jsonKey: text
uiType: Ignore
subParameters:
- jsonKey: content
label: Message
description: Specify the message that you want to send
- jsonKey: email
sort: 3
description: Support pushing email messages.
subParameters:
- jsonKey: from
sort: 1
subParameters:
- jsonKey: host
sort: 1
- jsonKey: port
sort: 3
- jsonKey: address
sort: 5
- jsonKey: password
sort: 7
uiType: Ignore
subParameters:
- jsonKey: value
label: Password
uiType: Password
validate:
required: true
- jsonKey: alias
sort: 9
- jsonKey: to
sort: 3
- jsonKey: content
sort: 5
subParameters:
- jsonKey: subject
sort: 1
- jsonKey: body
sort: 3
- jsonKey: slack
sort: 5
description: Support pushing messages to a Slack channel.
subParameters:
- jsonKey: url
uiType: Ignore
sort: 1
subParameters:
- jsonKey: value
label: Webhook
uiType: Input
validate:
required: true
- jsonKey: message
uiType: Ignore
subParameters:
- jsonKey: blocks
disable: true
- jsonKey: attachments
disable: true
- jsonKey: thread_ts
disable: true
- jsonKey: mrkdwn
disable: true
- jsonKey: text
label: Message
description: Specify the message that you want to send

View File

@ -1,12 +0,0 @@
- jsonKey: url
sort: 1
label: URL
uiType: Ignore
subParameters:
- jsonKey: value
label: URL
uiType: Input
validate:
required: true
- jsonKey: data
sort: 3

View File

@ -1,9 +0,0 @@
package main
output: {
apiVersion: "core.oam.dev/v1beta1"
kind: "Application"
spec: {
components: [additionalPrivileges, server]
}
}

View File

@ -1,37 +0,0 @@
import (
"vela/ql"
)
parameter: {
name: string
namespace?: string
cluster?: string
kind: string
apiVersion: string
}
response: ql.#Read & {
value: {
apiVersion: parameter.apiVersion
kind: parameter.kind
metadata: {
name: parameter.name
if parameter.namespace != _|_ {
namespace: parameter.namespace
}
}
}
if parameter.cluster != _|_ {
cluster: parameter.cluster
}
}
if response.err == _|_ {
status: {
resource: response.value
}
}
if response.err != _|_ {
status: {
error: response.err
}
}

View File

@ -1,33 +0,0 @@
import (
"vela/ql"
)
parameter: {
appName?: string
appNs?: string
}
secretList: ql.#List & {
resource: {
apiVersion: "v1"
kind: "Secret"
}
filter: {
matchingLabels: {
"created-by": "terraform-controller"
if parameter.appName != _|_ && parameter.appNs != _|_ {
"app.oam.dev/name": parameter.appName
"app.oam.dev/namespace": parameter.appNs
}
}
}
}
status: {
if secretList.err == _|_ {
secrets: secretList.list.items
}
if secretList.err != _|_ {
error: secretList.err
}
}

View File

@ -1,30 +0,0 @@
import (
"vela/ql"
)
parameter: {
appName: string
appNs: string
}
resources: ql.#ListResourcesInApp & {
app: {
name: parameter.appName
namespace: parameter.appNs
filter: {
"apiVersion": "terraform.core.oam.dev/v1beta2"
"kind": "Configuration"
}
withStatus: true
}
}
status: {
if resources.err == _|_ {
"cloud-resources": [ for i, resource in resources.list {
resource.object
}]
}
if resources.err != _|_ {
error: resources.err
}
}

View File

@ -1,43 +0,0 @@
import (
"vela/ql"
)
collectLogs: ql.#CollectLogsInPod & {
cluster: parameter.cluster
namespace: parameter.namespace
pod: parameter.pod
options: {
container: parameter.container
previous?: parameter.previous
sinceSeconds?: parameter.sinceSeconds
sinceTime?: parameter.sinceTime
timestamps?: parameter.timestamps
tailLines?: parameter.tailLines
limitBytes?: parameter.limitBytes
}
}
status: collectLogs.outputs
parameter: {
// +usage=Specify the cluster of the pod
cluster: string
// +usage=Specify the namespace of the pod
namespace: string
// +usage=Specify the name of the pod
pod: string
// +usage=Specify the name of the container
container: string
// +usage=If true, return previous terminated container logs
previous: *false | bool
// +usage=If set, show logs in relative times
sinceSeconds: *null | int
// +usage=RFC3339 timestamp, if set, show logs since this time
sinceTime: *null | string
// +usage=If true, add timestamp at the beginning of every line
timestamps: *false | bool
// +usage=If set, return the number of lines from the end of logs
tailLines: *null | int
// +usage=If set, limit the size of returned bytes
limitBytes: *null | int
}

View File

@ -1,55 +0,0 @@
import (
"vela/ql"
)
parameter: {
name: string
namespace: string
cluster: *"" | string
type: "deployment" | "statefulset" | "pod"
}
schema: {
"deployment": {
apiVersion: "apps/v1"
kind: "Deployment"
}
"statefulset": {
apiVersion: "apps/v1"
kind: "StatefulSet"
}
"pod": {
apiVersion: "v1"
kind: "Pod"
}
}
pod: ql.#Read & {
value: {
apiVersion: schema[parameter.type].apiVersion
kind: schema[parameter.type].kind
metadata: {
name: parameter.name
namespace: parameter.namespace
}
}
cluster: parameter.cluster
}
eventList: ql.#SearchEvents & {
value: {
apiVersion: schema[parameter.type].apiVersion
kind: schema[parameter.type].kind
metadata: pod.value.metadata
}
cluster: parameter.cluster
}
status: {
if eventList.err == _|_ {
events: eventList.list
}
if eventList.err != _|_ {
error: eventList.err
}
}

View File

@ -1,79 +0,0 @@
import (
"vela/ql"
)
parameter: {
name: string
namespace: string
cluster: *"" | string
}
pod: ql.#Read & {
value: {
apiVersion: "v1"
kind: "Pod"
metadata: {
name: parameter.name
namespace: parameter.namespace
}
}
cluster: parameter.cluster
}
eventList: ql.#SearchEvents & {
value: {
apiVersion: "v1"
kind: "Pod"
metadata: pod.value.metadata
}
cluster: parameter.cluster
}
podMetrics: ql.#Read & {
cluster: parameter.cluster
value: {
apiVersion: "metrics.k8s.io/v1beta1"
kind: "PodMetrics"
metadata: {
name: parameter.name
namespace: parameter.namespace
}
}
}
status: {
if pod.err == _|_ {
containers: [ for container in pod.value.spec.containers {
name: container.name
image: container.image
resources: {
if container.resources.limits != _|_ {
limits: container.resources.limits
}
if container.resources.requests != _|_ {
requests: container.resources.requests
}
if podMetrics.err == _|_ {
usage: {for containerUsage in podMetrics.value.containers {
if containerUsage.name == container.name {
cpu: containerUsage.usage.cpu
memory: containerUsage.usage.memory
}
}}
}
}
if pod.value.status.containerStatuses != _|_ {
status: {for containerStatus in pod.value.status.containerStatuses if containerStatus.name == container.name {
state: containerStatus.state
restartCount: containerStatus.restartCount
}}
}
}]
if eventList.err == _|_ {
events: eventList.list
}
}
if pod.err != _|_ {
error: pod.err
}
}

View File

@ -1,59 +0,0 @@
import (
"vela/ql"
)
parameter: {
type: string
namespace: *"" | string
cluster: *"" | string
}
schema: {
"secret": {
apiVersion: "v1"
kind: "Secret"
}
"configMap": {
apiVersion: "v1"
kind: "ConfigMap"
}
"pvc": {
apiVersion: "v1"
kind: "PersistentVolumeClaim"
}
"storageClass": {
apiVersion: "storage.k8s.io/v1"
kind: "StorageClass"
}
"ns": {
apiVersion: "v1"
kind: "Namespace"
}
"provider": {
apiVersion: "terraform.core.oam.dev/v1beta1"
kind: "Provider"
}
}
List: ql.#List & {
resource: schema[parameter.type]
filter: {
namespace: parameter.namespace
}
cluster: parameter.cluster
}
status: {
if List.err == _|_ {
if len(List.list.items) == 0 {
error: "failed to list \(parameter.type) in namespace \(parameter.namespace)"
}
if len(List.list.items) != 0 {
list: List.list.items
}
}
if List.err != _|_ {
error: List.err
}
}

View File

@ -1,38 +0,0 @@
import (
"vela/ql"
)
parameter: {
appName: string
appNs: string
cluster?: string
clusterNs?: string
}
resources: ql.#ListResourcesInApp & {
app: {
name: parameter.appName
namespace: parameter.appNs
filter: {
if parameter.cluster != _|_ {
cluster: parameter.cluster
}
if parameter.clusterNs != _|_ {
clusterNamespace: parameter.clusterNs
}
apiVersion: "v1"
kind: "Service"
}
withStatus: true
}
}
status: {
if resources.err == _|_ {
services: [ for i, resource in resources.list {
resource.object
}]
}
if resources.err != _|_ {
error: resources.err
}
}

View File

@ -1,62 +0,0 @@
{
"babelrc": false,
// Note: order is bottom-to-top and/or right-to-left
"presets": [
[
"@babel/preset-env",
{
"bugfixes": true,
"browserslistEnv": "dev",
"useBuiltIns": "entry",
"corejs": "3.10"
}
],
[
"@babel/preset-typescript",
{
"allowNamespaces": true,
"allowDeclareFields": true
}
],
[
"@babel/preset-react",
{
"runtime": "automatic"
}
]
],
"plugins": [
[
"@babel/plugin-transform-typescript",
{
"allowNamespaces": true,
"allowDeclareFields": true
}
],
// added to mitigate https://github.com/babel/babel/issues/14289
// package (and following line) can be removed once the issue is fixed and released
"@babel/plugin-proposal-class-properties",
["@babel/plugin-proposal-object-rest-spread", { "loose": true }],
"@babel/plugin-transform-react-constant-elements",
"@babel/plugin-proposal-nullish-coalescing-operator",
"@babel/plugin-proposal-optional-chaining",
"@babel/plugin-syntax-dynamic-import", // needed for `() => import()` in routes.ts
"angularjs-annotate",
"macros"
],
"env": {
"production": {
"presets": [
[
"@babel/preset-env",
{
"browserslistEnv": "production"
}
]
]
},
"hot": {
"plugins": ["react-refresh/babel"]
}
}
}

View File

@ -1,51 +0,0 @@
/*
Copyright 2022 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package options
import (
"flag"
cliflag "k8s.io/component-base/cli/flag"
"k8s.io/klog/v2"
"github.com/kubevela/velaux/pkg/features"
"github.com/kubevela/velaux/pkg/server/config"
)
// ServerRunOptions contains everything necessary to create and run api server
type ServerRunOptions struct {
GenericServerRunOptions *config.Config
}
// NewServerRunOptions creates a new ServerRunOptions object with default parameters
func NewServerRunOptions() *ServerRunOptions {
s := &ServerRunOptions{
GenericServerRunOptions: config.NewConfig(),
}
return s
}
// Flags returns the complete NamedFlagSets
func (s *ServerRunOptions) Flags() (fss cliflag.NamedFlagSets) {
fs := fss.FlagSet("generic")
s.GenericServerRunOptions.AddFlags(fs, s.GenericServerRunOptions)
features.APIServerMutableFeatureGate.AddFlag(fss.FlagSet("featuregate"))
local := flag.NewFlagSet("klog", flag.ExitOnError)
klog.InitFlags(local)
fs.AddGoFlagSet(local)
return fss
}

View File

@ -1,28 +0,0 @@
/*
Copyright 2022 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package options
import utilerrors "k8s.io/apimachinery/pkg/util/errors"
// Validate validates server run options, to find options' misconfiguration
func (s *ServerRunOptions) Validate() error {
var errors []error
errors = append(errors, s.GenericServerRunOptions.Validate()...)
return utilerrors.NewAggregate(errors)
}

View File

@ -1,151 +0,0 @@
/*
Copyright 2022 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package app
import (
"context"
"encoding/json"
"fmt"
"os"
"os/signal"
"syscall"
"github.com/kubevela/pkg/util/profiling"
restfulspec "github.com/emicklei/go-restful-openapi/v2"
"github.com/fatih/color"
"github.com/go-openapi/spec"
"github.com/spf13/cobra"
"k8s.io/klog/v2"
"github.com/kubevela/velaux/cmd/server/app/options"
"github.com/kubevela/velaux/pkg/server"
"github.com/oam-dev/kubevela/version"
)
// NewAPIServerCommand creates a *cobra.Command object with default parameters
func NewAPIServerCommand() *cobra.Command {
s := options.NewServerRunOptions()
cmd := &cobra.Command{
Use: "apiserver",
Long: `The KubeVela API server validates and configures data for the API objects.
The API Server services REST operations and provides the frontend to the
cluster's shared state through which all other components interact.`,
RunE: func(cmd *cobra.Command, args []string) error { //nolint:revive,unused
if err := s.Validate(); err != nil {
return err
}
return Run(s)
},
SilenceUsage: true,
}
fs := cmd.Flags()
namedFlagSets := s.Flags()
for _, set := range namedFlagSets.FlagSets {
fs.AddFlagSet(set)
}
buildSwaggerCmd := &cobra.Command{
Use: "build-swagger",
Short: "Build swagger documentation of KubeVela apiserver",
RunE: func(cmd *cobra.Command, args []string) error { //nolint:revive,unused
name := "docs/apidoc/latest-swagger.json"
if len(args) > 0 {
name = args[0]
}
func() {
swagger, err := buildSwagger(s)
if err != nil {
klog.Fatal(err.Error())
}
outData, err := json.MarshalIndent(swagger, "", "\t")
if err != nil {
klog.Fatal(err.Error())
}
swaggerFile, err := os.OpenFile(name, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600)
if err != nil {
klog.Fatal(err.Error())
}
defer func() {
if err := swaggerFile.Close(); err != nil {
klog.Errorf("close swagger file failure %s", err.Error())
}
}()
_, err = swaggerFile.Write(outData)
if err != nil {
klog.Fatal(err.Error())
}
fmt.Println("build swagger config file success")
}()
return nil
},
}
cmd.AddCommand(buildSwaggerCmd)
return cmd
}
// Run runs the specified APIServer. This should never exit.
func Run(s *options.ServerRunOptions) error {
// The server does not run in a terminal, so the color library turns color off by default.
// Force NoColor to false to keep colored output; this is useful for the dry-run API.
color.NoColor = false
errChan := make(chan error)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
go profiling.StartProfilingServer(errChan)
go func() {
if err := run(ctx, s, errChan); err != nil {
errChan <- fmt.Errorf("failed to run apiserver: %w", err)
}
}()
var term = make(chan os.Signal, 1)
signal.Notify(term, os.Interrupt, syscall.SIGTERM)
select {
case <-term:
klog.Infof("Received SIGTERM, exiting gracefully...")
case err := <-errChan:
klog.Errorf("Received an error: %s, exiting gracefully...", err.Error())
return err
}
klog.Infof("See you next time!")
return nil
}
func run(ctx context.Context, s *options.ServerRunOptions, errChan chan error) error {
klog.Infof("KubeVela information: version: %v, gitRevision: %v", version.VelaVersion, version.GitRevision)
server := server.New(*s.GenericServerRunOptions)
return server.Run(ctx, errChan)
}
func buildSwagger(s *options.ServerRunOptions) (*spec.Swagger, error) {
server := server.New(*s.GenericServerRunOptions)
config, err := server.BuildRestfulConfig()
if err != nil {
return nil, err
}
return restfulspec.BuildSwagger(*config), nil
}

View File

@ -1,30 +0,0 @@
/*
Copyright 2021 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"log"
"github.com/kubevela/velaux/cmd/server/app"
)
func main() {
cmd := app.NewAPIServerCommand()
if err := cmd.Execute(); err != nil {
log.Fatalln(err)
}
}

105
config/env.js Normal file
View File

@ -0,0 +1,105 @@
'use strict';
const fs = require('fs');
const path = require('path');
const paths = require('./paths');
// Make sure that including paths.js after env.js will read .env variables.
delete require.cache[require.resolve('./paths')];
const { NODE_ENV } = process.env;
if (!NODE_ENV) {
throw new Error('The NODE_ENV environment variable is required but was not specified.');
}
// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use
const dotenvFiles = [
`${paths.dotenv}.${NODE_ENV}.local`,
// Don't include `.env.local` for `test` environment
// since normally you expect tests to produce the same
// results for everyone
NODE_ENV !== 'test' && `${paths.dotenv}.local`,
`${paths.dotenv}.${NODE_ENV}`,
].filter(Boolean);
// Load environment variables from .env* files. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set. Variable expansion is supported in .env files.
// https://github.com/motdotla/dotenv
// https://github.com/motdotla/dotenv-expand
dotenvFiles.forEach((dotenvFile) => {
if (fs.existsSync(dotenvFile)) {
require('dotenv-expand')(
require('dotenv').config({
path: dotenvFile,
}),
);
}
});
// We support resolving modules according to `NODE_PATH`.
// This lets you use absolute paths in imports inside large monorepos:
// https://github.com/facebook/create-react-app/issues/253.
// It works similar to `NODE_PATH` in Node itself:
// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders
// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored.
// Otherwise, we risk importing Node.js core modules into an app instead of webpack shims.
// https://github.com/facebook/create-react-app/issues/1023#issuecomment-265344421
// We also resolve them to make sure all tools using them work consistently.
const appDirectory = fs.realpathSync(process.cwd());
process.env.NODE_PATH = (process.env.NODE_PATH || '')
.split(path.delimiter)
.filter((folder) => folder && !path.isAbsolute(folder))
.map((folder) => path.resolve(appDirectory, folder))
.join(path.delimiter);
// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be
// injected into the application via DefinePlugin in webpack configuration.
const REACT_APP = /^REACT_APP_/i;
function getClientEnvironment(publicUrl) {
const raw = Object.keys(process.env)
.filter((key) => REACT_APP.test(key))
.reduce(
(env, key) => {
env[key] = process.env[key];
return env;
},
{
// Useful for determining whether we're running in production mode.
// Most importantly, it switches React into the correct mode.
NODE_ENV: process.env.NODE_ENV || 'development',
// Useful for resolving the correct path to static assets in `public`.
// For example, <img src={process.env.PUBLIC_URL + '/img/logo.png'} />.
// This should only be used as an escape hatch. Normally you would put
// images into the `src` and `import` them in code to get their paths.
PUBLIC_URL: publicUrl,
BASE_DOMAIN: process.env.BASE_DOMAIN,
// We support configuring the sockjs pathname during development.
// These settings let a developer run multiple simultaneous projects.
// They are used as the connection `hostname`, `pathname` and `port`
// in webpackHotDevClient. They are used as the `sockHost`, `sockPath`
// and `sockPort` options in webpack-dev-server.
WDS_SOCKET_HOST: process.env.WDS_SOCKET_HOST,
WDS_SOCKET_PATH: process.env.WDS_SOCKET_PATH,
WDS_SOCKET_PORT: process.env.WDS_SOCKET_PORT,
MOCK: process.env.MOCK,
// Whether or not react-refresh is enabled.
// react-refresh is not 100% stable at this time;
// it can be turned off by setting FAST_REFRESH=false.
// It is defined here so it is available in the webpackHotDevClient.
FAST_REFRESH: process.env.FAST_REFRESH !== 'false',
},
);
// Stringify all values so we can feed into webpack DefinePlugin
const stringified = {
'process.env': Object.keys(raw).reduce((env, key) => {
env[key] = JSON.stringify(raw[key]);
return env;
}, {}),
};
return { raw, stringified };
}
module.exports = getClientEnvironment;
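For reference, a `.env` file consumed by the loader above might look like this (a sketch; `REACT_APP_DEBUG` is a hypothetical variable used only to illustrate the `REACT_APP_` prefix rule):

```shell
# Only REACT_APP_-prefixed variables (plus explicitly listed ones such as BASE_DOMAIN) reach the client bundle
BASE_DOMAIN='http://127.0.0.1:8000'
REACT_APP_DEBUG='true'
```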

66
config/getHttpsConfig.js Normal file
View File

@ -0,0 +1,66 @@
'use strict';
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const chalk = require('react-dev-utils/chalk');
const paths = require('./paths');
// Ensure the certificate and key provided are valid and if not
// throw an easy to debug error
function validateKeyAndCerts({ cert, key, keyFile, crtFile }) {
let encrypted;
try {
// publicEncrypt will throw an error with an invalid cert
encrypted = crypto.publicEncrypt(cert, Buffer.from('test'));
} catch (err) {
throw new Error(
`The certificate "${chalk.yellow(crtFile)}" is invalid.\n${err.message}`,
);
}
try {
// privateDecrypt will throw an error with an invalid key
crypto.privateDecrypt(key, encrypted);
} catch (err) {
throw new Error(
`The certificate key "${chalk.yellow(keyFile)}" is invalid.\n${
err.message
}`,
);
}
}
// Read file and throw an error if it doesn't exist
function readEnvFile(file, type) {
if (!fs.existsSync(file)) {
throw new Error(
`You specified ${chalk.cyan(
type,
)} in your env, but the file "${chalk.yellow(file)}" can't be found.`,
);
}
return fs.readFileSync(file);
}
// Get the https config
// Return cert files if provided in env, otherwise just true or false
function getHttpsConfig() {
const { SSL_CRT_FILE, SSL_KEY_FILE, HTTPS } = process.env;
const isHttps = HTTPS === 'true';
if (isHttps && SSL_CRT_FILE && SSL_KEY_FILE) {
const crtFile = path.resolve(paths.appPath, SSL_CRT_FILE);
const keyFile = path.resolve(paths.appPath, SSL_KEY_FILE);
const config = {
cert: readEnvFile(crtFile, 'SSL_CRT_FILE'),
key: readEnvFile(keyFile, 'SSL_KEY_FILE'),
};
validateKeyAndCerts({ ...config, keyFile, crtFile });
return config;
}
return isHttps;
}
module.exports = getHttpsConfig;
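For reference, the HTTPS behavior above is driven entirely by environment variables; a local run with a self-signed certificate might look like this (a sketch; the certificate paths are placeholders and `yarn start` is assumed to be the dev entry point):

```shell
HTTPS=true \
SSL_CRT_FILE=./certs/localhost.crt \
SSL_KEY_FILE=./certs/localhost.key \
yarn start
```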

134
config/modules.js Normal file
View File

@ -0,0 +1,134 @@
'use strict';
const fs = require('fs');
const path = require('path');
const paths = require('./paths');
const chalk = require('react-dev-utils/chalk');
const resolve = require('resolve');
/**
* Get additional module paths based on the baseUrl of a compilerOptions object.
*
* @param {Object} options
*/
function getAdditionalModulePaths(options = {}) {
const { baseUrl } = options;
if (!baseUrl) {
return '';
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
// We don't need to do anything if `baseUrl` is set to `node_modules`. This is
// the default behavior.
if (path.relative(paths.appNodeModules, baseUrlResolved) === '') {
return null;
}
// Allow the user set the `baseUrl` to `appSrc`.
if (path.relative(paths.appSrc, baseUrlResolved) === '') {
return [paths.appSrc];
}
// If the path is equal to the root directory we ignore it here.
// We don't want to allow importing from the root directly as source files are
// not transpiled outside of `src`. We do allow importing them with the
// absolute path (e.g. `src/Components/Button.js`) but we set that up with
// an alias.
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return null;
}
// Otherwise, throw an error.
throw new Error(
chalk.red.bold(
"Your project's `baseUrl` can only be set to `src` or `node_modules`." +
' Create React App does not support other values at this time.',
),
);
}
/**
* Get webpack aliases based on the baseUrl of a compilerOptions object.
*
* @param {*} options
*/
function getWebpackAliases(options = {}) {
const { baseUrl } = options;
if (!baseUrl) {
return {};
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return {
src: paths.appSrc,
};
}
}
/**
* Get jest aliases based on the baseUrl of a compilerOptions object.
*
* @param {*} options
*/
function getJestAliases(options = {}) {
const { baseUrl } = options;
if (!baseUrl) {
return {};
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return {
'^src/(.*)$': '<rootDir>/src/$1',
};
}
}
function getModules() {
// Check if TypeScript is setup
const hasTsConfig = fs.existsSync(paths.appTsConfig);
const hasJsConfig = fs.existsSync(paths.appJsConfig);
if (hasTsConfig && hasJsConfig) {
throw new Error(
'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.',
);
}
let config;
// If there's a tsconfig.json we assume it's a
// TypeScript project and set up the config
// based on tsconfig.json
if (hasTsConfig) {
const ts = require(resolve.sync('typescript', {
basedir: paths.appNodeModules,
}));
config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config;
// Otherwise we'll check if there is jsconfig.json
// for non TS projects.
} else if (hasJsConfig) {
config = require(paths.appJsConfig);
}
config = config || {};
const options = config.compilerOptions || {};
const additionalModulePaths = getAdditionalModulePaths(options);
return {
additionalModulePaths,
webpackAliases: getWebpackAliases(options),
jestAliases: getJestAliases(options),
hasTsConfig,
};
}
module.exports = getModules();

73
config/paths.js Normal file
View File

@ -0,0 +1,73 @@
'use strict';
const path = require('path');
const fs = require('fs');
const getPublicUrlOrPath = require('react-dev-utils/getPublicUrlOrPath');
// Make sure any symlinks in the project folder are resolved:
// https://github.com/facebook/create-react-app/issues/637
const appDirectory = fs.realpathSync(process.cwd());
const resolveApp = relativePath => path.resolve(appDirectory, relativePath);
// We use `PUBLIC_URL` environment variable or "homepage" field to infer
// "public path" at which the app is served.
// webpack needs to know it to put the right <script> hrefs into HTML even in
// single-page apps that may serve index.html for nested URLs like /todos/42.
// We can't use a relative path in HTML because we don't want to load something
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
const publicUrlOrPath = getPublicUrlOrPath(
process.env.NODE_ENV === 'development',
require(resolveApp('package.json')).homepage,
process.env.PUBLIC_URL,
);
const buildPath = process.env.BUILD_PATH || 'build';
const moduleFileExtensions = [
'web.mjs',
'mjs',
'web.js',
'js',
'web.ts',
'ts',
'web.tsx',
'tsx',
'json',
'web.jsx',
'jsx',
];
// Resolve file paths in the same order as webpack
const resolveModule = (resolveFn, filePath) => {
const extension = moduleFileExtensions.find(extension =>
fs.existsSync(resolveFn(`${filePath}.${extension}`)));
if (extension) {
return resolveFn(`${filePath}.${extension}`);
}
return resolveFn(`${filePath}.js`);
};
// config after eject: we're in ./config/
module.exports = {
dotenv: resolveApp('.env'),
appPath: resolveApp('.'),
appBuild: resolveApp(buildPath),
appPublic: resolveApp('public'),
appHtml: resolveApp('public/index.html'),
appIndexJs: resolveModule(resolveApp, 'src/index'),
appPackageJson: resolveApp('package.json'),
appSrc: resolveApp('src'),
appTsConfig: resolveApp('tsconfig.json'),
appJsConfig: resolveApp('jsconfig.json'),
yarnLockFile: resolveApp('yarn.lock'),
testsSetup: resolveModule(resolveApp, 'src/setupTests'),
proxySetup: resolveApp('src/setupProxy.js'),
appNodeModules: resolveApp('node_modules'),
swSrc: resolveModule(resolveApp, 'src/service-worker'),
publicUrlOrPath,
};
module.exports.moduleFileExtensions = moduleFileExtensions;

35
config/pnpTs.js Normal file
View File

@ -0,0 +1,35 @@
'use strict';
const { resolveModuleName } = require('ts-pnp');
exports.resolveModuleName = (
typescript,
moduleName,
containingFile,
compilerOptions,
resolutionHost,
) => {
return resolveModuleName(
moduleName,
containingFile,
compilerOptions,
resolutionHost,
typescript.resolveModuleName,
);
};
exports.resolveTypeReferenceDirective = (
typescript,
moduleName,
containingFile,
compilerOptions,
resolutionHost,
) => {
return resolveModuleName(
moduleName,
containingFile,
compilerOptions,
resolutionHost,
typescript.resolveTypeReferenceDirective,
);
};

680
config/webpack.config.js Normal file
View File

@ -0,0 +1,680 @@
'use strict';
const fs = require('fs');
const path = require('path');
const webpack = require('webpack');
const resolve = require('resolve');
const PnpWebpackPlugin = require('pnp-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const InlineChunkHtmlPlugin = require('react-dev-utils/InlineChunkHtmlPlugin');
const TerserPlugin = require('terser-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const safePostCssParser = require('postcss-safe-parser');
const ManifestPlugin = require('webpack-manifest-plugin');
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
const WorkboxWebpackPlugin = require('workbox-webpack-plugin');
const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin');
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin');
const getCSSModuleLocalIdent = require('react-dev-utils/getCSSModuleLocalIdent');
//const ESLintPlugin = require('eslint-webpack-plugin');
const paths = require('./paths');
const modules = require('./modules');
const getClientEnvironment = require('./env');
const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin');
const ForkTsCheckerWebpackPlugin = require('react-dev-utils/ForkTsCheckerWebpackPlugin');
const typescriptFormatter = require('react-dev-utils/typescriptFormatter');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const postcssNormalize = require('postcss-normalize');
const statements = require('tsx-control-statements').default;
const appPackageJson = require(paths.appPackageJson);
const tsxControlStatements = require('tsx-control-statements');
// Source maps are resource heavy and can cause out of memory issue for large source files.
const shouldUseSourceMap = process.env.GENERATE_SOURCEMAP !== 'false';
const webpackDevClientEntry = require.resolve('react-dev-utils/webpackHotDevClient');
const reactRefreshOverlayEntry = require.resolve('react-dev-utils/refreshOverlayInterop');
// Some apps do not need the benefits of saving a web request, so not inlining the chunk
// makes for a smoother build process.
const shouldInlineRuntimeChunk = process.env.INLINE_RUNTIME_CHUNK !== 'false';
//const emitErrorsAsWarnings = process.env.ESLINT_NO_DEV_ERRORS === 'true';
//const disableESLintPlugin = process.env.DISABLE_ESLINT_PLUGIN === 'true';
const imageInlineSizeLimit = parseInt(process.env.IMAGE_INLINE_SIZE_LIMIT || '10000');
// Check if TypeScript is setup
const useTypeScript = fs.existsSync(paths.appTsConfig);
// Get the path to the uncompiled service worker (if it exists).
const { swSrc } = paths;
// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;
const lessRegex = /\.less$/;
// get git info from command line
let commitHash = require('child_process').execSync('git rev-parse --short HEAD').toString().trim();
// This is the production and development configuration.
// It is focused on developer experience, fast rebuilds, and a minimal bundle.
module.exports = function (webpackEnv) {
const isEnvDevelopment = webpackEnv === 'development';
const isEnvProduction = webpackEnv === 'production';
// Variable used for enabling profiling in Production
// passed into alias object. Uses a flag if passed into the build command
const isEnvProductionProfile = isEnvProduction && process.argv.includes('--profile');
// We will provide `paths.publicUrlOrPath` to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
// Get environment variables to inject into our app.
const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));
const shouldUseReactRefresh = env.raw.FAST_REFRESH;
// common function to get style loaders
const getStyleLoaders = (cssOptions, preProcessor) => {
const loaders = [
isEnvDevelopment && require.resolve('style-loader'),
isEnvProduction && {
loader: MiniCssExtractPlugin.loader,
// css is located in `static/css`, use '../../' to locate index.html folder
// in production `paths.publicUrlOrPath` can be a relative path
options: paths.publicUrlOrPath.startsWith('.') ? { publicPath: '../../' } : {},
},
{
loader: require.resolve('css-loader'),
options: cssOptions,
},
{
// Options for PostCSS as we reference these options twice
// Adds vendor prefixing based on your specified browser support in
// package.json
loader: require.resolve('postcss-loader'),
options: {
// Necessary for external CSS imports to work
// https://github.com/facebook/create-react-app/issues/2677
ident: 'postcss',
plugins: () => [
require('postcss-flexbugs-fixes'),
require('postcss-preset-env')({
autoprefixer: {
flexbox: 'no-2009',
},
stage: 3,
}),
// Adds PostCSS Normalize as the reset css with default options,
// so that it honors browserslist config in package.json
// which in turn lets users customize the target behavior as per their needs.
postcssNormalize(),
],
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
},
},
].filter(Boolean);
if (preProcessor) {
loaders.push(
{
loader: require.resolve('resolve-url-loader'),
options: {
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
root: paths.appSrc,
},
},
{
loader: require.resolve(preProcessor),
options: {
sourceMap: true,
},
},
);
}
return loaders;
};
return {
mode: isEnvProduction ? 'production' : isEnvDevelopment && 'development',
// Stop compilation early in production
bail: isEnvProduction,
devtool: isEnvProduction
? shouldUseSourceMap
? 'source-map'
: false
: isEnvDevelopment && 'cheap-module-source-map',
// These are the "entry points" to our application.
// This means they will be the "root" imports that are included in JS bundle.
entry:
isEnvDevelopment && !shouldUseReactRefresh
? [
// Include an alternative client for WebpackDevServer. A client's job is to
// connect to WebpackDevServer by a socket and get notified about changes.
// When you save a file, the client will either apply hot updates (in case
// of CSS changes), or refresh the page (in case of JS changes). When you
// make a syntax error, this client will display a syntax error overlay.
// Note: instead of the default WebpackDevServer client, we use a custom one
// to bring better experience for Create React App users. You can replace
// the line below with these two lines if you prefer the stock client:
//
// require.resolve('webpack-dev-server/client') + '?/',
// require.resolve('webpack/hot/dev-server'),
//
// When using the experimental react-refresh integration,
// the webpack plugin takes care of injecting the dev client for us.
webpackDevClientEntry,
// Finally, this is your app's code:
paths.appIndexJs,
// We include the app code last so that if there is a runtime error during
// initialization, it doesn't blow up the WebpackDevServer client, and
// changing JS code would still trigger a refresh.
]
: paths.appIndexJs,
output: {
// The build folder.
path: isEnvProduction ? paths.appBuild : undefined,
// Add /* filename */ comments to generated require()s in the output.
pathinfo: isEnvDevelopment,
// There will be one main bundle, and one file per asynchronous chunk.
// In development, it does not produce real files.
filename: isEnvProduction
? 'static/js/[name].[contenthash:8].js'
: isEnvDevelopment && 'static/js/bundle.js',
// TODO: remove this when upgrading to webpack 5
futureEmitAssets: true,
// There are also additional JS chunk files if you use code splitting.
chunkFilename: isEnvProduction
? 'static/js/[name].[contenthash:8].chunk.js'
: isEnvDevelopment && 'static/js/[name].chunk.js',
// webpack uses `publicPath` to determine where the app is being served from.
// It requires a trailing slash, or the file assets will get an incorrect path.
// We inferred the "public path" (such as / or /my-project) from homepage.
publicPath: paths.publicUrlOrPath,
// Point sourcemap entries to original disk location (format as URL on Windows)
devtoolModuleFilenameTemplate: isEnvProduction
? (info) => path.relative(paths.appSrc, info.absoluteResourcePath).replace(/\\/g, '/')
: isEnvDevelopment &&
((info) => path.resolve(info.absoluteResourcePath).replace(/\\/g, '/')),
// Prevents conflicts when multiple webpack runtimes (from different apps)
// are used on the same page.
jsonpFunction: `webpackJsonp${appPackageJson.name}`,
// this defaults to 'window', but by setting it to 'this' then
// module chunks which are built will work in web workers as well.
globalObject: 'this',
},
optimization: {
minimize: isEnvProduction,
minimizer: [
// This is only used in production mode
new TerserPlugin({
terserOptions: {
parse: {
// We want terser to parse ecma 8 code. However, we don't want it
// to apply any minification steps that turns valid ecma 5 code
// into invalid ecma 5 code. This is why the 'compress' and 'output'
// sections only apply transformations that are ecma 5 safe
// https://github.com/facebook/create-react-app/pull/4234
ecma: 8,
},
compress: {
ecma: 5,
warnings: false,
// Disabled because of an issue with Uglify breaking seemingly valid code:
// https://github.com/facebook/create-react-app/issues/2376
// Pending further investigation:
// https://github.com/mishoo/UglifyJS2/issues/2011
comparisons: false,
// Disabled because of an issue with Terser breaking valid code:
// https://github.com/facebook/create-react-app/issues/5250
// Pending further investigation:
// https://github.com/terser-js/terser/issues/120
inline: 2,
},
mangle: {
safari10: true,
},
// Added for profiling in devtools
keep_classnames: isEnvProductionProfile,
keep_fnames: isEnvProductionProfile,
output: {
ecma: 5,
comments: false,
// Turned on because emoji and regex is not minified properly using default
// https://github.com/facebook/create-react-app/issues/2488
ascii_only: true,
},
},
sourceMap: shouldUseSourceMap,
}),
// This is only used in production mode
new OptimizeCSSAssetsPlugin({
cssProcessorOptions: {
parser: safePostCssParser,
map: shouldUseSourceMap
? {
// `inline: false` forces the sourcemap to be output into a
// separate file
inline: false,
// `annotation: true` appends the sourceMappingURL to the end of
// the css file, helping the browser find the sourcemap
annotation: true,
}
: false,
},
cssProcessorPluginOptions: {
preset: ['default', { minifyFontValues: { removeQuotes: false } }],
},
}),
],
// Automatically split vendor and commons
// https://twitter.com/wSokra/status/969633336732905474
// https://medium.com/webpack/webpack-4-code-splitting-chunk-graph-and-the-splitchunks-optimization-be739a861366
splitChunks: {
chunks: 'all',
name: isEnvDevelopment,
},
// Keep the runtime chunk separated to enable long term caching
// https://twitter.com/wSokra/status/969679223278505985
// https://github.com/facebook/create-react-app/issues/5358
runtimeChunk: {
name: (entrypoint) => `runtime-${entrypoint.name}`,
},
},
resolve: {
// This allows you to set a fallback for where webpack should look for modules.
// We placed these paths second because we want `node_modules` to "win"
// if there are any conflicts. This matches Node resolution mechanism.
// https://github.com/facebook/create-react-app/issues/253
modules: ['node_modules', paths.appNodeModules].concat(modules.additionalModulePaths || []),
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebook/create-react-app/issues/290
// `web` extension prefixes have been added for better support
// for React Native Web.
extensions: paths.moduleFileExtensions
.map((ext) => `.${ext}`)
.filter((ext) => useTypeScript || !ext.includes('ts')),
alias: {
// Support React Native Web
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
'react-native': 'react-native-web',
// Allows for better profiling with ReactDevTools
...(isEnvProductionProfile && {
'react-dom$': 'react-dom/profiling',
'scheduler/tracing': 'scheduler/tracing-profiling',
}),
...(modules.webpackAliases || {}),
},
plugins: [
// Adds support for installing with Plug'n'Play, leading to faster installs and adding
// guards against forgotten dependencies and such.
PnpWebpackPlugin,
// Prevents users from importing files from outside of src/ (or node_modules/).
// This often causes confusion because we only process files within src/ with babel.
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
// please link the files into your node_modules/ and let module-resolution kick in.
// Make sure your source files are compiled, as they will not be processed in any way.
new ModuleScopePlugin(paths.appSrc, [paths.appPackageJson, reactRefreshOverlayEntry]),
],
},
resolveLoader: {
plugins: [
// Also related to Plug'n'Play, but this time it tells webpack to load its loaders
// from the current package.
PnpWebpackPlugin.moduleLoader(module),
],
},
module: {
strictExportPresence: true,
rules: [
// Disable require.ensure as it's not a standard language feature.
{ parser: { requireEnsure: false } },
{
// "oneOf" will traverse all following loaders until one will
// match the requirements. When no loader matches it will fall
// back to the "file" loader at the end of the loader list.
oneOf: [
// TODO: Merge this config once `image/avif` is in the mime-db
// https://github.com/jshttp/mime-db
{
test: [/\.avif$/],
loader: require.resolve('url-loader'),
options: {
limit: imageInlineSizeLimit,
mimetype: 'image/avif',
name: 'static/media/[name].[hash:8].[ext]',
},
},
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
loader: require.resolve('url-loader'),
options: {
limit: imageInlineSizeLimit,
name: 'static/media/[name].[hash:8].[ext]',
},
},
// Process application JS with Babel.
// The preset includes JSX, Flow, TypeScript, and some ESnext features.
{
test: /\.(ts|tsx)$/,
include: paths.appSrc,
use: [
{
loader: require.resolve('ts-loader'),
options: {
// disable type checker - we will use it in fork plugin
transpileOnly: true,
getCustomTransformers: () => ({
before: [tsxControlStatements.default()],
}),
},
},
],
},
{
test: /\.(js|mjs|jsx|ts|tsx)$/,
include: paths.appSrc,
loader: 'babel-loader',
options: {
presets: ['@babel/preset-env', '@babel/preset-typescript', '@babel/preset-react'],
plugins: [
[
'@babel/plugin-proposal-decorators',
{
legacy: true,
},
],
].filter(Boolean),
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true,
// See #6846 for context on why cacheCompression is disabled
cacheCompression: false,
compact: isEnvProduction,
},
},
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader turns CSS into JS modules that inject <style> tags.
// In production, we use MiniCSSExtractPlugin to extract that CSS
// to a file, but in development "style" loader enables hot editing
// of CSS.
// By default we support CSS Modules with the extension .module.css
{
test: cssRegex,
exclude: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
}),
// Don't consider CSS imports dead code even if the
// containing package claims to have no side effects.
// Remove this when webpack adds a warning or an error for this.
// See https://github.com/webpack/webpack/issues/6571
sideEffects: true,
},
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
// using the extension .module.css
{
test: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
modules: {
getLocalIdent: getCSSModuleLocalIdent,
},
}),
},
// Opt-in support for SASS (using .scss or .sass extensions).
// By default we support SASS Modules with the
// extensions .module.scss or .module.sass
{
test: sassRegex,
exclude: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 3,
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
},
'sass-loader',
),
// Don't consider CSS imports dead code even if the
// containing package claims to have no side effects.
// Remove this when webpack adds a warning or an error for this.
// See https://github.com/webpack/webpack/issues/6571
sideEffects: true,
},
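// Adds support for plain Less stylesheets via the classic
// style-loader / css-loader / less-loader chain (no CSS Modules here).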
{
test: lessRegex,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
},
{
loader: 'less-loader',
},
],
},
// Adds support for CSS Modules, but using SASS
// using the extension .module.scss or .module.sass
{
test: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 3,
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
modules: {
getLocalIdent: getCSSModuleLocalIdent,
},
},
'sass-loader',
),
},
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `build` folder.
// This loader doesn't use a "test" so it will catch all modules
// that fall through the other loaders.
{
loader: require.resolve('file-loader'),
// Exclude `js` files to keep "css" loader working as it injects
// its runtime that would otherwise be processed through "file" loader.
// Also exclude `html` and `json` extensions so they get processed
// by webpack's internal loaders.
exclude: [/\.(js|mjs|jsx|ts|tsx)$/, /\.html$/, /\.json$/],
options: {
name: 'static/media/[name].[hash:8].[ext]',
},
},
// ** STOP ** Are you adding a new loader?
// Make sure to add the new loader(s) before the "file" loader.
],
},
],
},
plugins: [
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin(
Object.assign(
{},
{
inject: true,
template: paths.appHtml,
},
isEnvProduction
? {
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true,
},
}
: undefined,
),
),
// Inlines the webpack runtime script. This script is too small to warrant
// a network request.
// https://github.com/facebook/create-react-app/issues/5358
isEnvProduction &&
shouldInlineRuntimeChunk &&
new InlineChunkHtmlPlugin(HtmlWebpackPlugin, [/runtime-.+[.]js/]),
// Makes some environment variables available in index.html.
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
// It will be an empty string unless you specify "homepage"
// in `package.json`, in which case it will be the pathname of that URL.
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
// This gives some necessary context to module not found errors, such as
// the requesting resource.
new ModuleNotFoundPlugin(paths.appPath),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`.
// It is absolutely essential that NODE_ENV is set to production
// during a production build.
// Otherwise React will be compiled in the very slow development mode.
new webpack.DefinePlugin(env.stringified),
// This is necessary to emit hot updates (CSS and Fast Refresh):
isEnvDevelopment && new webpack.HotModuleReplacementPlugin(),
// Experimental hot reloading for React .
// https://github.com/facebook/react/tree/master/packages/react-refresh
isEnvDevelopment &&
shouldUseReactRefresh &&
new ReactRefreshWebpackPlugin({
overlay: {
entry: webpackDevClientEntry,
// The expected exports are slightly different from what the overlay exports,
// so an interop is included here to enable feedback on module-level errors.
module: reactRefreshOverlayEntry,
// Since we ship a custom dev client and overlay integration,
// the bundled socket handling logic can be eliminated.
sockIntegration: false,
},
}),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebook/create-react-app/issues/240
isEnvDevelopment && new CaseSensitivePathsPlugin(),
// If you require a missing module and then `npm install` it, you still have
// to restart the development server for webpack to discover it. This plugin
// makes the discovery automatic so you don't have to restart.
// See https://github.com/facebook/create-react-app/issues/186
isEnvDevelopment && new WatchMissingNodeModulesPlugin(paths.appNodeModules),
isEnvProduction &&
new MiniCssExtractPlugin({
// Options similar to the same options in webpackOptions.output
// both options are optional
filename: 'static/css/[name].[contenthash:8].css',
chunkFilename: 'static/css/[name].[contenthash:8].chunk.css',
}),
// Generate an asset manifest file with the following content:
// - "files" key: Mapping of all asset filenames to their corresponding
// output file so that tools can pick it up without having to parse
// `index.html`
// - "entrypoints" key: Array of files which are included in `index.html`,
// can be used to reconstruct the HTML if necessary
new ManifestPlugin({
fileName: 'asset-manifest.json',
publicPath: paths.publicUrlOrPath,
generate: (seed, files, entrypoints) => {
const manifestFiles = files.reduce((manifest, file) => {
manifest[file.name] = file.path;
return manifest;
}, seed);
const entrypointFiles = entrypoints.main.filter((fileName) => !fileName.endsWith('.map'));
return {
files: manifestFiles,
entrypoints: entrypointFiles,
};
},
}),
// Moment.js is an extremely popular library that bundles large locale files
// by default due to how webpack interprets its code. This is a practical
// solution that requires the user to opt into importing specific locales.
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack
// You can remove this if you don't use Moment.js:
new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
// Generate a service worker script that will precache, and keep up to date,
// the HTML & assets that are part of the webpack build.
isEnvProduction &&
fs.existsSync(swSrc) &&
new WorkboxWebpackPlugin.InjectManifest({
swSrc,
dontCacheBustURLsMatching: /\.[0-9a-f]{8}\./,
exclude: [/\.map$/, /asset-manifest\.json$/, /LICENSE/],
// Bump up the default maximum size (2mb) that's precached,
// to make lazy-loading failure scenarios less likely.
// See https://github.com/cra-template/pwa/issues/13#issuecomment-722667270
maximumFileSizeToCacheInBytes: 5 * 1024 * 1024,
}),
// TypeScript type checking
useTypeScript &&
new ForkTsCheckerWebpackPlugin({
typescript: resolve.sync('typescript', {
basedir: paths.appNodeModules,
}),
async: isEnvDevelopment,
checkSyntacticErrors: true,
resolveModuleNameModule: process.versions.pnp ? `${__dirname}/pnpTs.js` : undefined,
resolveTypeReferenceDirectiveModule: process.versions.pnp
? `${__dirname}/pnpTs.js`
: undefined,
tsconfig: paths.appTsConfig,
reportFiles: [
// This one is specifically to match during CI tests,
// as micromatch doesn't match
// '../cra-template-typescript/template/src/App.tsx'
// otherwise.
'../**/src/**/*.{ts,tsx}',
'**/src/**/*.{ts,tsx}',
'!**/src/**/__tests__/**',
'!**/src/**/?(*.)(spec|test).*',
'!**/src/setupProxy.*',
'!**/src/setupTests.*',
],
silent: true,
// The formatter is invoked directly in WebpackDevServerUtils during development
formatter: isEnvProduction ? typescriptFormatter : undefined,
}),
new webpack.DefinePlugin({
__COMMIT_HASH__: JSON.stringify(commitHash),
}),
].filter(Boolean),
// Some libraries import Node modules but don't use them in the browser.
// Tell webpack to provide empty mocks for them so importing them works.
node: {
module: 'empty',
dgram: 'empty',
dns: 'mock',
fs: 'empty',
http2: 'empty',
net: 'empty',
tls: 'empty',
child_process: 'empty',
},
// Turn off performance processing because we utilize
// our own hints via the FileSizeReporter
performance: false,
};
};

View File

@ -0,0 +1,130 @@
'use strict';
const fs = require('fs');
const errorOverlayMiddleware = require('react-dev-utils/errorOverlayMiddleware');
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
const ignoredFiles = require('react-dev-utils/ignoredFiles');
const redirectServedPath = require('react-dev-utils/redirectServedPathMiddleware');
const paths = require('./paths');
const getHttpsConfig = require('./getHttpsConfig');
const host = process.env.HOST || '0.0.0.0';
const sockHost = process.env.WDS_SOCKET_HOST;
const sockPath = process.env.WDS_SOCKET_PATH; // default: '/sockjs-node'
const sockPort = process.env.WDS_SOCKET_PORT;
module.exports = function (proxy, allowedHost) {
return {
// WebpackDevServer 2.4.3 introduced a security fix that prevents remote
// websites from potentially accessing local content through DNS rebinding:
// https://github.com/webpack/webpack-dev-server/issues/887
// https://medium.com/webpack/webpack-dev-server-middleware-security-issues-1489d950874a
// However, it made several existing use cases such as development in cloud
// environment or subdomains in development significantly more complicated:
// https://github.com/facebook/create-react-app/issues/2271
// https://github.com/facebook/create-react-app/issues/2233
// While we're investigating better solutions, for now we will take a
// compromise. Since our WDS configuration only serves files in the `public`
// folder we won't consider accessing them a vulnerability. However, if you
// use the `proxy` feature, it gets more dangerous because it can expose
// remote code execution vulnerabilities in backends like Django and Rails.
// So we will disable the host check normally, but enable it if you have
// specified the `proxy` setting. Finally, we let you override it if you
// really know what you're doing with a special environment variable.
disableHostCheck:
!proxy || process.env.DANGEROUSLY_DISABLE_HOST_CHECK === 'true',
// Enable gzip compression of generated files.
compress: true,
// Silence WebpackDevServer's own logs since they're generally not useful.
// It will still show compile warnings and errors with this setting.
clientLogLevel: 'none',
// By default WebpackDevServer serves physical files from current directory
// in addition to all the virtual build products that it serves from memory.
// This is confusing because those files won't automatically be available in
// production build folder unless we copy them. However, copying the whole
// project directory is dangerous because we may expose sensitive files.
// Instead, we establish a convention that only files in `public` directory
// get served. Our build script will copy `public` into the `build` folder.
// In `index.html`, you can get URL of `public` folder with %PUBLIC_URL%:
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
// In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
// Note that we only recommend to use `public` folder as an escape hatch
// for files like `favicon.ico`, `manifest.json`, and libraries that are
// for some reason broken when imported through webpack. If you just want to
// use an image, put it in `src` and `import` it from JavaScript instead.
contentBase: paths.appPublic,
contentBasePublicPath: paths.publicUrlOrPath,
// By default files from `contentBase` will not trigger a page reload.
watchContentBase: true,
// Enable hot reloading server. It will provide WDS_SOCKET_PATH endpoint
// for the WebpackDevServer client so it can learn when the files were
// updated. The WebpackDevServer client is included as an entry point
// in the webpack development configuration. Note that only changes
// to CSS are currently hot reloaded. JS changes will refresh the browser.
hot: true,
// Use 'ws' instead of 'sockjs-node' on server since we're using native
// websockets in `webpackHotDevClient`.
transportMode: 'ws',
// Prevent a WS client from getting injected as we're already including
// `webpackHotDevClient`.
injectClient: false,
// Enable custom sockjs pathname for websocket connection to hot reloading server.
// Enable custom sockjs hostname, pathname and port for websocket connection
// to hot reloading server.
sockHost,
sockPath,
sockPort,
// It is important to tell WebpackDevServer to use the same "publicPath" path as
// we specified in the webpack config. When homepage is '.', default to serving
// from the root.
// remove last slash so user can land on `/test` instead of `/test/`
publicPath: paths.publicUrlOrPath.slice(0, -1),
// WebpackDevServer is noisy by default so we emit custom message instead
// by listening to the compiler events with `compiler.hooks[...].tap` calls above.
quiet: true,
// Reportedly, this avoids CPU overload on some systems.
// https://github.com/facebook/create-react-app/issues/293
// src/node_modules is not ignored to support absolute imports
// https://github.com/facebook/create-react-app/issues/1065
watchOptions: {
ignored: ignoredFiles(paths.appSrc),
},
https: getHttpsConfig(),
host,
overlay: false,
historyApiFallback: {
// Paths with dots should still use the history fallback.
// See https://github.com/facebook/create-react-app/issues/387.
disableDotRule: true,
index: paths.publicUrlOrPath,
},
public: allowedHost,
// `proxy` is run between `before` and `after` `webpack-dev-server` hooks
proxy,
before(app, server) {
// Keep `evalSourceMapMiddleware` and `errorOverlayMiddleware`
// middlewares before `redirectServedPath` otherwise will not have any effect
// This lets us fetch source contents from webpack for the error overlay
app.use(evalSourceMapMiddleware(server));
// This lets us open files from the runtime error overlay.
app.use(errorOverlayMiddleware());
if (fs.existsSync(paths.proxySetup)) {
// This registers user provided middleware for proxy reasons
require(paths.proxySetup)(app);
}
},
after(app) {
// Redirect to `PUBLIC_URL` or `homepage` from `package.json` if url not match
app.use(redirectServedPath(paths.publicUrlOrPath));
// This service worker file is effectively a 'no-op' that will reset any
// previous service worker registered for the same host:port combination.
// We do this in development to avoid hitting the production cache if
// it used the same host and port.
// https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
app.use(noopServiceWorkerMiddleware(paths.publicUrlOrPath));
},
};
};

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,107 +0,0 @@
# How to build a plugin
A VelaUX plugin lets you customize any page application. The default and extension runtime APIs make it easier to develop your application.
## 1. Build the local server environment.
Refer to the [contributing](./docs/contributing/velaux.md) guide.
## 2. Initialize the plugin project.
```bash
mkdir custom-plugins
pluginName=<your_plugin_name>
git clone https://github.com/kubevela-contrib/velaux-plugin-template custom-plugins/$pluginName
cd custom-plugins/$pluginName
```
## 3. Edit the plugin metadata.
* `src/plugin.json`: Plugin metadata; you should change it for your plugin.
* `package.json`: Project metadata; you should change the project name, version, description, etc.
Plugin metadata spec:
```cue
{
"type": "page-app",
"name": string,
"id": string,
"info": {
"keywords": []string,
"description": string,
"author": {
"name": string
},
"logos": {
"small": string,
"large": string
},
"screenshots": [],
"version": string,
"updated": string
},
"backend"?: bool,
"proxy"?: bool,
"backendType?": "kube-api" | "kube-service",
"routes"?: [
{
"path": string,
"permission": {
"resource": string,
"action": string
}
}
],
"backendService"?: {
"name": string
"namespace"?: string
},
"kubePermissions"?: [
{
"apiGroups": string[],
"resources": string[],
"verbs": string[],
}
]
}
```
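For example, a minimal metadata file for a frontend-only page-app plugin could look like the sketch below. All values (plugin name, ID, author, logo paths) are illustrative, and the `dependencies` block mirrors the example configs under `docs/plugins`:
```json
{
  "type": "page-app",
  "name": "My Plugin",
  "id": "my-plugin",
  "info": {
    "keywords": ["app"],
    "description": "An example page-app plugin",
    "author": { "name": "Your Name" },
    "logos": { "small": "img/logo.svg", "large": "img/logo.svg" },
    "screenshots": [],
    "version": "0.0.1",
    "updated": "2023-03-30"
  },
  "dependencies": {
    "velauxDependency": ">=1.8.0",
    "plugins": []
  }
}
```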
More complete example plugin configs are available at https://github.com/kubevela/velaux/tree/main/docs/plugins.
## 4. Develop the plugin
```bash
yarn install
yarn dev
```
### Request the backend API
```js
import { getBackendSrv } from '@velaux/ui';
// Request the core APIs
getBackendSrv().get('/api/v1/clusters').then(res=>{console.log(res)})
// Request the plugin proxy APIs
getBackendSrv().get(`/proxy/plugins/${pluginID}/${realPath}`).then(res=>{console.log(res)})
```
Core API Reference: https://kubevela.net/docs/platform-engineers/openapi/overview
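As an illustrative sketch tying this to the plugin metadata: assuming a plugin with ID `backend-kube-api-allow-route` that declares a proxied route `/api/v1/nodes` (like the example config under `docs/plugins`), and assuming the proxied path maps directly onto the declared route, a call through the plugin proxy would look roughly like this:
```js
import { getBackendSrv } from '@velaux/ui';

// Illustrative sketch only: the plugin ID and route come from the
// backend-kube-api-allow-route example config; the path follows the
// /proxy/plugins/<pluginID>/<realPath> pattern shown above.
getBackendSrv()
  .get('/proxy/plugins/backend-kube-api-allow-route/api/v1/nodes')
  .then((res) => console.log(res));
```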
### UI Components
```js
import { Table, Form } from '@velaux/ui';
```
UI Component Reference: https://fusion.design/pc/component/box?themeid=2
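The components exported from `@velaux/ui` follow the Fusion Design API linked above. As a minimal sketch, assuming the re-exported `Table` keeps the standard Fusion `dataSource`/`Table.Column` interface (the `clusters` prop and column names are illustrative):
```js
import React from 'react';
import { Table } from '@velaux/ui';

// Minimal sketch: render a list of clusters with the Fusion-style Table.
// The `clusters` prop shape ({ name, status }) is illustrative only.
export const ClusterTable = ({ clusters }) => (
  <Table dataSource={clusters}>
    <Table.Column title="Name" dataIndex="name" />
    <Table.Column title="Status" dataIndex="status" />
  </Table>
);
```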
## 5. Start the server to debug the plugin
```bash
go run ./cmd/server/main.go --plugin-path <custom-plugins-path>
```

View File

@ -1,127 +1,30 @@
# Contribute VelaUX
## Prepare the local environment
Before you start, please make sure the Vela API server environment is already running.
### Install VelaCore
1. Check requirements
* VelaD supports installing KubeVela on Linux, macOS, and Windows.
* If you are using Linux or macOS, make sure `curl` is installed on your machine.
* If you are using macOS or Windows, make sure you have already installed [Docker](https://www.docker.com/products/docker-desktop).
* Make sure you have installed [yarn](https://classic.yarnpkg.com/en/docs/install).
* If your API server address is 'http://127.0.0.1:8000', point the frontend at it:
```shell
echo "BASE_DOMAIN='http://127.0.0.1:8000'" > .env
```
2. Download the binary.
* MacOS/Linux
```bash
curl -fsSl https://static.kubevela.net/script/install-velad.sh | bash
```
* Windows
```bash
powershell -Command "iwr -useb https://static.kubevela.net/script/install-velad.ps1 | iex"
```
3. Install
```bash
velad install
```
4. Install VelaUX environment
```bash
vela addon enable ./addon
```
## Start the server on local
Make sure you have installed [yarn 2.0](https://yarnpkg.com/getting-started/install); this is required.
Run this project:
```shell
yarn install
yarn build-packages
## Build the frontend and watch the code changes
yarn dev
yarn start
```
### Start the server
```shell
## Setting the KUBECONFIG environment
export KUBECONFIG=$(velad kubeconfig --host)
make run-server
```
Wait until the server has started, then open http://127.0.0.1:8000 in your browser.
Now the local environment is set up successfully, and you can work on the server or frontend code.
Notes:
* If you change the frontend code, it takes effect after you refresh the website.
* If you change the server code, it takes effect after you restart the server.
### Check the code style
```shell
# Frontend
yarn lint
# Server
make reviewable
```
### Test the code
For testing the server, kubebuilder and its dependency tools are required. To install them, run:
```shell
make setup-test-server
```
Frontend:
```shell
yarn test
```
Server:
```shell
make unit-test-server
make e2e-server-test
```
### Generate the OpenAPI schema
```shell
make build-swagger
```
### Configure yarn 2 in VS Code
Add the following config to `settings.json`:
```json
{
"search.exclude": {
"**/.yarn": true,
"**/.pnp.*": true
},
"typescript.tsdk": ".yarn/sdks/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true
}
```
## Develop the plugin
Reference: [How to build a plugin](./how-to-build-plugin)
## References
* UI framework: [@alifd/next](https://fusion.design/)
* Icons: [react-icons](https://react-icons.github.io/react-icons)

View File

@ -1,52 +0,0 @@
{
"type": "page-app",
"name": "Backend Kube API Allow Route",
"id": "backend-kube-api-allow-route",
"info": {
"keywords": [
"app"
],
"description": "The app demo plugin",
"author": {
"name": "KubeVela"
},
"logos": {
"small": "img/logo.svg",
"large": "img/logo.svg"
},
"screenshots": [],
"version": "0.0.1",
"updated": "2023-03-30"
},
"backend": true,
"proxy": true,
"backendType": "kube-api",
"kubePermissions": [
{
"apiGroups": [
""
],
"resources": [
"nodes",
"nodes/status"
],
"verbs": [
"get",
"list"
]
}
],
"routes": [
{
"path": "/api/v1/nodes",
"permission": {
"resource": "node",
"action": "detail"
}
}
],
"dependencies": {
"velauxDependency": ">=1.8.0",
"plugins": []
}
}

View File

@ -1,55 +0,0 @@
{
"type": "page-app",
"name": "Backend Kube API",
"id": "backend-kube-api-node",
"info": {
"keywords": [
"app"
],
"description": "The app demo plugin",
"author": {
"name": "KubeVela"
},
"logos": {
"small": "img/logo.svg",
"large": "img/logo.svg"
},
"screenshots": [],
"version": "0.0.1",
"updated": "2023-03-30"
},
"backend": true,
"proxy": true,
"backendType": "kube-api",
"routes": [
{
"path": "/nodes/:node",
"method": "GET",
"proxyHeaders": [
{
"name": "Authorization",
"value": "Bearer test"
}
]
}
],
"kubePermissions": [
{
"apiGroups": [
""
],
"resources": [
"nodes",
"nodes/status"
],
"verbs": [
"get",
"list"
]
}
],
"dependencies": {
"velauxDependency": ">=1.8.0",
"plugins": []
}
}

View File

@ -1,43 +0,0 @@
{
"type": "page-app",
"name": "Backend Kube API",
"id": "backend-kube-api",
"info": {
"keywords": [
"app"
],
"description": "The app demo plugin",
"author": {
"name": "KubeVela"
},
"logos": {
"small": "img/logo.svg",
"large": "img/logo.svg"
},
"screenshots": [],
"version": "0.0.1",
"updated": "2023-03-30"
},
"backend": true,
"proxy": true,
"backendType": "kube-api",
"kubePermissions": [
{
"apiGroups": [
""
],
"resources": [
"nodes",
"nodes/status"
],
"verbs": [
"get",
"list"
]
}
],
"dependencies": {
"velauxDependency": ">=1.8.0",
"plugins": []
}
}

View File

@ -1,31 +0,0 @@
{
"type": "page-app",
"name": "Backend Kube Service",
"id": "backend-kube-service",
"info": {
"keywords": [
"app"
],
"description": "The app demo plugin",
"author": {
"name": "KubeVela"
},
"logos": {
"small": "img/logo.svg",
"large": "img/logo.svg"
},
"screenshots": [],
"version": "0.0.1",
"updated": "2023-03-30"
},
"backend": true,
"proxy": true,
"backendType": "kube-service",
"backendService": {
"name": "test"
},
"dependencies": {
"velauxDependency": ">=1.8.0",
"plugins": []
}
}

View File

@ -1,41 +0,0 @@
{
"type": "page-app",
"name": "Frontend",
"id": "frontend",
"info": {
"keywords": [
"app"
],
"description": "The app demo plugin",
"author": {
"name": "KubeVela"
},
"logos": {
"small": "img/logo.svg",
"large": "img/logo.svg"
},
"screenshots": [],
"version": "0.0.1",
"updated": "2023-03-30"
},
"includes": [
{
"workspace": {
"name": "continuous-delivery"
},
"type": "Workspace",
"label": "Custom App",
"name": "custom-app",
"to": "/plugins/frontend",
"relatedRoute": [
"/plugins/frontend"
],
"icon": "",
"catalog": "Continuous Delivery"
}
],
"dependencies": {
"velauxDependency": ">=1.8.0",
"plugins": []
}
}

View File

@ -1,7 +0,0 @@
name: bar
version: 1.0.0
description: Vela test addon named bar
icon: https://www.nar.com/icon
url: https://www.bar.com

Some files were not shown because too many files have changed in this diff