Compare commits
174 Commits
@@ -0,0 +1,110 @@
### Java template
# Compiled class file
*.class

# Log file
*.log

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*

### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# AWS User-specific
.idea/**/aws.xml

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# SonarLint plugin
.idea/sonarlint/

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

# GitHub
.github

# Git
.git
@@ -0,0 +1,49 @@
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: java-spiffe-helper
data:
  java-spiffe-helper.properties: |
    keyStorePath=/tmp/keystore.p12
    keyStorePass=password
    keyPass=password
    trustStorePath=/tmp/truststore.p12
    trustStorePass=password
    keyStoreType=pkcs12
    keyAlias=spiffe
    spiffeSocketPath=unix:/run/spire/agent-sockets/spire-agent.sock
---
apiVersion: v1
kind: Pod
metadata:
  name: java-spiffe-helper
  labels:
    app: java-spiffe-helper
spec:
  containers:
    - name: java-spiffe-helper
      image: java-spiffe-helper:test
      imagePullPolicy: IfNotPresent
      readinessProbe:
        initialDelaySeconds: 15
        exec:
          command:
            - ls
            - /tmp/truststore.p12
      volumeMounts:
        - name: properties
          mountPath: /app/java-spiffe-helper.properties
          subPath: java-spiffe-helper.properties
        - name: spire-sockets
          mountPath: /run/spire/agent-sockets
          readOnly: true
  restartPolicy: Never
  volumes:
    - name: properties
      configMap:
        name: java-spiffe-helper
    - name: spire-sockets
      hostPath:
        path: /run/spire/agent-sockets
        type: DirectoryOrCreate
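The readiness probe above only checks that the truststore file exists; the helper keeps both PKCS#12 stores under /tmp up to date as SVIDs rotate. As a rough illustration of how a workload could consume those stores with plain JDK APIs, here is a minimal sketch. The class name is hypothetical; the paths, passwords, and store type are taken from the ConfigMap above.

```java
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyStore;

/** Illustrative example: build an SSLContext from the stores written by java-spiffe-helper. */
public class HelperStoresExample {

    public static SSLContext buildSslContext() throws Exception {
        char[] password = "password".toCharArray(); // matches keyStorePass/trustStorePass in the ConfigMap

        // Key material (the workload's X.509 SVID) written by the helper
        KeyStore keyStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(Paths.get("/tmp/keystore.p12"))) {
            keyStore.load(in, password);
        }
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(keyStore, password); // keyPass is also "password" in the ConfigMap

        // Trust bundle written by the helper
        KeyStore trustStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(Paths.get("/tmp/truststore.p12"))) {
            trustStore.load(in, password);
        }
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);

        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
        return sslContext;
    }
}
```

Because the helper rewrites the store files on rotation, a real consumer would reload them periodically rather than caching the resulting SSLContext indefinitely.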
@@ -0,0 +1,18 @@
spire-server:
  ca_subject:
    common_name: common_name
  controllerManager:
    identities:
      clusterSPIFFEIDs:
        default:
          enabled: false
        java-spiffe-helper:
          spiffeIDTemplate: spiffe://{{ .TrustDomain }}/ns/{{ .PodMeta.Namespace }}/sa/{{ .PodSpec.ServiceAccountName }}
          namespaceSelector:
            matchLabels:
              kubernetes.io/metadata.name: default
          podSelector:
            matchLabels:
              app: java-spiffe-helper
          dnsNameTemplates:
            - dnsNameTemplate
@@ -0,0 +1,15 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "gradle"
    directory: "/"
    schedule:
      interval: "daily"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
@@ -1,10 +1,14 @@
-name: continuous-integration
+name: Build

 on:
   push:
-    branches: [ main ]
+    branches:
+      - main
   pull_request:
     branches: [ main ]
+  workflow_dispatch:
+
+permissions:
+  contents: read

 jobs:

@@ -13,17 +17,17 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        java-version: [ 8, 11, 16 ]
+        java-version: [ 8, 11, 17, 21 ]

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK
-        uses: actions/setup-java@v2
+        uses: actions/setup-java@v4
         with:
           java-version: ${{ matrix.java-version }}
           distribution: 'adopt'
       - name: Cache Gradle packages
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           path: |
             ~/.gradle/caches

@@ -34,7 +38,7 @@ jobs:
       - name: Build with Gradle and generate the artifacts (also run the tests)
         run: ./gradlew build
       - name: Run integration tests
-        run: ./integration-tests.sh
+        run: ./.github/workflows/scripts/integration-tests.sh
       - name: Cleanup Gradle Cache
         # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
         # Restoring these files from a GitHub Actions cache might cause problems for future builds.

@@ -47,17 +51,17 @@ jobs:
     runs-on: macos-latest
     strategy:
       matrix:
-        java-version: [ 8, 11, 14 ] # macos-latest at his point doesn't have java 16 installed
+        java-version: [ 8, 11, 17, 21 ]

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK
-        uses: actions/setup-java@v2
+        uses: actions/setup-java@v4
         with:
           java-version: ${{ matrix.java-version }}
-          distribution: 'adopt'
+          distribution: 'zulu'
       - name: Cache Gradle packages
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           path: |
             ~/.gradle/caches
@@ -10,16 +10,16 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK
-        uses: actions/setup-java@v2
+        uses: actions/setup-java@v4
         with:
           java-version: '16'
           distribution: 'adopt'
       - name: Generate and upload coverage report
         env:
           COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
-        run: ./gradlew build jacocoTestReport coveralls
+        run: ./gradlew jacocoTestReport coveralls
       - name: Cleanup Gradle Cache
         # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
         # Restoring these files from a GitHub Actions cache might cause problems for future builds.
@@ -0,0 +1,85 @@
name: Java SPIFFE Helper CI

on:
  - pull_request

jobs:
  test:
    name: Test with SPIRE helm chart in version ${{ matrix.spire-chart-version.spire }}
    runs-on: ubuntu-latest

    strategy:
      matrix:
        spire-chart-version:
          - spire: '0.17.x'
            crds: '0.3.x'

    env:
      HELM_REPOSITORY: https://spiffe.github.io/helm-charts-hardened/
      KEYSTORE_COMMON_NAME: keystore-${{ github.sha }}
      TRUSTSTORE_COMMON_NAME: truststore-${{ github.sha }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Setup QEMU
        uses: docker/setup-qemu-action@v3
      - name: Setup buildx
        uses: docker/setup-buildx-action@v3
      - name: Setup Java
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '17'
      - name: Build container
        uses: docker/build-push-action@v6
        with:
          context: .
          tags: java-spiffe-helper:test
          load: true
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Create local kubernetes cluster
        uses: helm/kind-action@v1
        with:
          cluster_name: kind
      - name: Load container image onto kubernetes node
        run: kind load docker-image java-spiffe-helper:test --name kind
      - name: Install SPIRE CRDs in version ${{ matrix.spire-chart-version.crds }}
        run: |
          helm upgrade --install -n spire-server spire-crds spire-crds \
            --repo ${{ env.HELM_REPOSITORY }} \
            --version ${{ matrix.spire-chart-version.crds }} \
            --create-namespace
      - name: Install SPIRE server in version ${{ matrix.spire-chart-version.spire }} and set to-be-verified values for common name
        run: |
          helm upgrade --install -n spire-server spire spire \
            --repo ${{ env.HELM_REPOSITORY }} \
            --version ${{ matrix.spire-chart-version.spire }} \
            --values .github/ci-k8s-configs/spire-values.yaml \
            --set spire-server.ca_subject.common_name="$TRUSTSTORE_COMMON_NAME" \
            --set spire-server.controllerManager.identities.clusterSPIFFEIDs.java-spiffe-helper.dnsNameTemplates[0]="$KEYSTORE_COMMON_NAME"
      - name: Deploy java-spiffe-helper pod to local cluster
        run: kubectl apply -f .github/ci-k8s-configs/java-spiffe-helper.yaml
      - name: Wait for java-spiffe-helper pod to become ready
        run: kubectl wait pod/java-spiffe-helper --for condition=Ready --timeout=90s
      - name: Output logs of java-spiffe-helper pod
        if: ${{ failure() }}
        run: kubectl logs pod/java-spiffe-helper
      - name: Describe java-spiffe-helper pod
        if: ${{ failure() }}
        run: kubectl describe pod/java-spiffe-helper
      - name: Copy keystore from java-spiffe-helper pod
        run: kubectl cp java-spiffe-helper:/tmp/keystore.p12 keystore.p12
      - name: Copy truststore from java-spiffe-helper pod
        run: kubectl cp java-spiffe-helper:/tmp/truststore.p12 truststore.p12
      - name: Verify keystore contains configured common name
        run: keytool -v -list -keystore keystore.p12 -storepass password | grep "CN=${{ env.KEYSTORE_COMMON_NAME }}"
      - name: Output keystore contents
        if: ${{ failure() }}
        run: keytool -v -list -keystore keystore.p12 -storepass password
      - name: Verify truststore contains configured common name
        run: keytool -v -list -keystore truststore.p12 -storepass password | grep "CN=${{ env.TRUSTSTORE_COMMON_NAME }}"
      - name: Output truststore contents
        if: ${{ failure() }}
        run: keytool -v -list -keystore truststore.p12 -storepass password
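The final steps verify the copied stores by shelling out to keytool and grepping for the expected CN. For reference, the same check can be expressed directly with JDK APIs; this sketch is illustrative and not part of the workflow — the class name is made up, while the alias "spiffe" and the password come from the ConfigMap used by the pod.

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.cert.X509Certificate;

/** Illustrative equivalent of the keytool | grep verification steps above. */
public class VerifyKeystoreCn {

    public static void main(String[] args) throws Exception {
        // storepass used by the workflow; the expected CN is passed as the first argument,
        // e.g. the keystore-<commit-sha> value set via KEYSTORE_COMMON_NAME
        char[] password = "password".toCharArray();
        String expectedCn = "CN=" + args[0];

        KeyStore keyStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(Paths.get("keystore.p12"))) {
            keyStore.load(in, password);
        }

        // java-spiffe-helper stores the SVID under the configured keyAlias (default "spiffe")
        X509Certificate cert = (X509Certificate) keyStore.getCertificate("spiffe");
        String subject = cert.getSubjectX500Principal().getName();

        if (!subject.contains(expectedCn)) {
            throw new IllegalStateException("Unexpected keystore subject: " + subject);
        }
        System.out.println("Keystore subject verified: " + subject);
    }
}
```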
@@ -0,0 +1,59 @@
name: Release Build

on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'

env:
  REGISTRY: ghcr.io
  JAVA_VERSION: '17'

jobs:
  publishToMaven:
    runs-on: ubuntu-latest
    env:
      NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }}
      NEXUS_TOKEN: ${{ secrets.NEXUS_TOKEN }}
      PGP_PRIVATE_KEY: ${{ secrets.PGP_PRIVATE_KEY }}
      PGP_KEY_PASSPHRASE: ${{ secrets.PGP_KEY_PASSPHRASE }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up JDK
        uses: actions/setup-java@v4
        with:
          java-version: ${{ env.JAVA_VERSION }}
          distribution: 'adopt'
      - name: Publish to Nexus Maven Repository
        run: ./gradlew publish

  publishDockerImage:
    needs: publishToMaven
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v4
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Determine Docker Tag
        run: echo "DOCKER_TAG=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - name: Publish java-spiffe-helper Docker Image
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ env.REGISTRY }}/${{ github.repository }}-helper:${{ env.DOCKER_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -7,7 +7,7 @@ set -euf -o pipefail

 export SPIFFE_ENDPOINT_SOCKET="unix:/tmp/spire-agent/public/api.sock"

-spire_version="1.0.2"
+spire_version="1.11.0"
 spire_folder="spire-${spire_version}"
 spire_server_log_file="/tmp/spire-server/server.log"
 spire_agent_log_file="/tmp/spire-agent/agent.log"

@@ -24,7 +24,7 @@ function cleanup() {
 trap cleanup EXIT

 # Install and run a SPIRE server
-curl -s -N -L https://github.com/spiffe/spire/releases/download/v${spire_version}/spire-${spire_version}-linux-x86_64-glibc.tar.gz | tar xz
+curl -s -N -L https://github.com/spiffe/spire/releases/download/v${spire_version}/spire-${spire_version}-linux-amd64-musl.tar.gz | tar xz
 pushd "${spire_folder}"
 mkdir -p /tmp/spire-server
 bin/spire-server run -config conf/server/server.conf > "${spire_server_log_file}" 2>&1 &
@@ -0,0 +1,125 @@
# Changelog

## [0.8.12] - 2025-06-05

### Dependency updates

- Bump grpcVersion to 1.73.0 (#327)
- Bump com.nimbusds:nimbus-jose-jwt to 10.3 (#323)
- Bump io.netty:netty-transport-native-kqueue to 4.2.1.Final (#321)
- Bump com.google.protobuf:protobuf-gradle-plugin to 0.9.5 (#314)
- Bump org.projectlombok:lombok to 1.18.38 (#313)

### Documentation

- Add documentation about sslNegotiation config in java-spiffe-provider (#325)

## [0.8.11] - 2024-11-18

### Fixed

- Fix CRLF newline removal in DER format logic for Windows compatibility (#284)

### Dependency updates

- Bump io.netty:netty-transport-native-kqueue from 4.1.114.Final to 4.1.115.Final (#281)
- Bump com.nimbusds:nimbus-jose-jwt from 9.45 to 9.47 (#285)
- Bump org.projectlombok:lombok from 1.18.34 to 1.18.36 (#286)

## [0.8.10] - 2024-11-06

### Dependency updates

- Bump jupiterVersion from 5.11.2 to 5.11.3 (#273)
- Bump grpcVersion from 1.68.0 to 1.68.1 (#276)
- Bump com.nimbusds:nimbus-jose-jwt from 9.41.2 to 9.45 (#278)

## [0.8.9] - 2024-10-09

### Dependency updates

- Bump `com.google.protobuf:protoc` to `3.25.5` (#271)

## [0.8.8] - 2024-10-08

### Dependency updates

- Bump `grpcVersion` from 1.66.0 to 1.68.0 (#262)
- Bump `io.netty:netty-transport-native-kqueue` from 4.1.113.Final to 4.1.114.Final (#265)
- Bump `com.nimbusds:nimbus-jose-jwt` from 9.41.1 to 9.41.2 (#266)

### Changed

- Updated Gradle to version 8.10.2 (#269)

## [0.8.7] - 2024-09-20

### Dependency updates

- Bump `grpcVersion` from 1.62.2 to 1.66.0 (#248)
- Bump `io.netty:netty-transport-native-kqueue` from 4.1.107.Final to 4.1.113.Final (#260)
- Bump `commons-cli:commons-cli` from 1.6.0 to 1.9.0 (#258)
- Bump `com.nimbusds:nimbus-jose-jwt` from 9.37.3 to 9.41.1 (#259)
- Bump `org.apache.commons:commons-lang3` from 3.14.0 to 3.17.0 (#255)
- Bump `org.projectlombok:lombok` from 1.18.30 to 1.18.34 (#253)
- Bump `commons-validator:commons-validator` from 1.8.0 to 1.9.0 (#251)
- Bump `jupiterVersion` from 5.10.2 to 5.11.0 (#254)

## [0.8.6] - 2024-03-04

### Dependency updates

- Bump `com.google.protobuf:protoc` from 3.25.2 to 3.25.3 (#218)
- Bump `io.grpc:grpc-protobuf`, `io.grpc:grpc-stub`, `io.grpc:grpc-netty`, `io.grpc:grpc-netty-shaded`,
  and `io.grpc:protoc-gen-grpc-java` from 1.61.1 to 1.62.2 (#222)
- Bump `io.netty:netty-transport-native-kqueue` from 4.1.106.Final to 4.1.107.Final (#205)

### CI/CD Improvements

Automated build and publish process via GitHub Actions.

## [0.8.5] - 2024-02-14

### Added

- Docker container and CI workflow for `java-spiffe-helper` (#187)

### Changed

- Updated Gradle to version 8.5 (#201)
- Various enhancements in `java-spiffe-helper` (#199)

### Fixed

- Addressed a Fat Jar Assembly issue (#198)

### Dependency updates

- Bump `io.grpc:grpc-protobuf` and `io.grpc:grpc-stub` from 1.54.0 to 1.61.1 (#202)
- Bump `commons-validator:commons-validator` from 1.7 to 1.8.0 (#197)
- Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 (#196)
- Bump `com.google.protobuf:protoc` from 3.21.12 to 3.25.2 (#193)
- Bump `io.netty:netty-transport-native-kqueue` from 4.1.91.Final to 4.1.106.Final (#192)
- Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.14.0 (#189)
- Bump `com.nimbusds:nimbus-jose-jwt` from 9.31 to 9.37.3 (#184)
- Bump `org.projectlombok:lombok` from 1.18.26 to 1.18.30 (#170)
- Bump `com.google.protobuf:protobuf-gradle-plugin` from 0.9.2 to 0.9.4 (#153)

## [0.8.4] - 2023-04-14

### Dependency updates

- Bump `commons-cli:commons-cli` from 1.4 to 1.5.0 (#124)
- Bump `com.google.osdetector` from 1.6.2 to 1.7.3 (#125)
- Bump `org.apache.commons:commons-lang3` from 3.11 to 3.12.0 (#129)
- Bump `org.projectlombok:lombok` from 1.18.20 to 1.18.26 (#128)

## [0.8.3] - 2023-04-13

### Added

- A `JwtSource` implementation, `CachedJwtSource`, that caches the JWT SVIDs based on their subjects and audiences (#116)
- Support for the `hint` field in the SVIDs retrieved by the Workload API client (#114)
@@ -1,4 +1,4 @@
-@maxlambrecht @rturner3
+* @maxlambrecht @rturner3

 ##########################################
 # Maintainers
@@ -0,0 +1,15 @@
FROM eclipse-temurin:17-jdk AS builder
WORKDIR /builder
COPY . /builder

RUN ./gradlew dependencies
RUN ./gradlew java-spiffe-helper:assemble -ParchiveClassifier=docker -Pversion=docker

FROM eclipse-temurin:17-jre AS runner
USER nobody

COPY conf/java-spiffe-helper.properties /app/java-spiffe-helper.properties
COPY --from=builder /builder/java-spiffe-helper/build/libs/java-spiffe-helper-docker-docker.jar /app/java-spiffe-helper.jar

ENTRYPOINT ["java", "-jar", "/app/java-spiffe-helper.jar"]
CMD ["--config", "/app/java-spiffe-helper.properties"]
README.md
@@ -1,7 +1,7 @@
 # Java SPIFFE Library

-[](https://github.com/spiffe/java-spiffe/actions/workflows/gradle.yml)
-[](https://coveralls.io/github/spiffe/java-spiffe?branch=master)
+[](https://github.com/spiffe/java-spiffe/actions/workflows/build.yml?query=branch%3Amain)
+[](https://coveralls.io/github/spiffe/java-spiffe?branch=main)

 ## Overview

@@ -26,7 +26,7 @@ X.509 and JWT SVIDs and bundles.
 Download
 --------

-The JARs can be downloaded from [Maven Central](https://search.maven.org/search?q=g:io.spiffe%20AND%20v:0.7.0).
+The JARs can be downloaded from [Maven Central](https://search.maven.org/search?q=g:io.spiffe%20AND%20v:0.8.12).

 The dependencies can be added to `pom.xml`

@@ -35,7 +35,7 @@ To import the `java-spiffe-provider` component:
 <dependency>
     <groupId>io.spiffe</groupId>
     <artifactId>java-spiffe-provider</artifactId>
-    <version>0.7.0</version>
+    <version>0.8.12</version>
 </dependency>
 ```
 The `java-spiffe-provider` component imports the `java-spiffe-core` component.

@@ -45,7 +45,7 @@ To just import the `java-spiffe-core` component:
 <dependency>
     <groupId>io.spiffe</groupId>
     <artifactId>java-spiffe-core</artifactId>
-    <version>0.7.0</version>
+    <version>0.8.12</version>
 </dependency>
 ```

@@ -53,39 +53,70 @@ Using Gradle:

 Import `java-spiffe-provider`:
 ```gradle
-implementation group: 'io.spiffe', name: 'java-spiffe-provider', version: '0.7.0'
+implementation group: 'io.spiffe', name: 'java-spiffe-provider', version: '0.8.12'
 ```

 Import `java-spiffe-core`:
 ```gradle
-implementation group: 'io.spiffe', name: 'java-spiffe-core', version: '0.7.0'
+implementation group: 'io.spiffe', name: 'java-spiffe-core', version: '0.8.12'
 ```

 ### MacOS Support

-Add to your `pom.xml`:
+#### x86 Architecture
+
+In case run on a osx-x86 architecture, add to your `pom.xml`:

 ```xml
 <dependency>
     <groupId>io.spiffe</groupId>
     <artifactId>grpc-netty-macos</artifactId>
-    <version>0.7.0</version>
+    <version>0.8.12</version>
     <scope>runtime</scope>
 </dependency>
 ```

 Using Gradle:
 ```gradle
-runtimeOnly group: 'io.spiffe', name: 'grpc-netty-macos', version: '0.7.0'
+runtimeOnly group: 'io.spiffe', name: 'grpc-netty-macos', version: '0.8.12'
 ```

-### Note: `java-spiffe-helper` artifact
+#### Aarch64 (M1) Architecture

-As the [java-spiffe-helper](java-spiffe-helper/README.md) artifact is meant to be used as a standalone JAR and not as a Maven dependency,
-it is not published to Maven Central, but to [Github releases](https://github.com/spiffe/java-spiffe/releases/tag/v0.7.0), for both
-[Linux](https://github.com/spiffe/java-spiffe/releases/download/v0.7.0/java-spiffe-helper-0.7.0-linux-x86_64.jar) and
-[MacOS](https://github.com/spiffe/java-spiffe/releases/download/v0.7.0/java-spiffe-helper-0.7.0-osx-x86_64.jar) versions.
+If you are running the aarch64 architecture (M1 CPUs), add to your `pom.xml`:

-### Build the JARs
+```xml
+<dependency>
+    <groupId>io.spiffe</groupId>
+    <artifactId>grpc-netty-macos-aarch64</artifactId>
+    <version>0.8.12</version>
+    <scope>runtime</scope>
+</dependency>
+```
+
+Using Gradle:
+
+```gradle
+runtimeOnly group: 'io.spiffe', name: 'grpc-netty-macos-aarch64', version: '0.8.12'
+```
+
+*Caveat: not all OpenJDK distributions are aarch64 native, make sure your JDK is also running
+natively*
+
+## Java SPIFFE Helper
+
+The `java-spiffe-helper` module manages X.509 SVIDs and Bundles in Java Keystores.
+
+### Docker Image
+
+Pull the `java-spiffe-helper` image from `ghcr.io/spiffe/java-spiffe-helper:0.8.12`.
+
+For more details, see [java-spiffe-helper/README.md](java-spiffe-helper/README.md).
+
+## Build the JARs

 On Linux or MacOS, run:

@@ -105,4 +136,5 @@ For the module [java-spiffe-helper](java-spiffe-helper), a fat jar is generated
 Based on the OS where the build is run, the `[os-classifier]` will be:

 * `-linux-x86_64` for Linux
-* `-osx-x86_64` for MacOS
+* `-osx-x86_64` for MacOS with x86_64 architecture
+* `-osx-aarch64` for MacOS with aarch64 architecture (M1)
build.gradle
@@ -1,6 +1,7 @@
 plugins {
-    id 'com.github.kt3k.coveralls' version '2.12.0'
-    id 'com.google.osdetector' version '1.6.2'
+    id 'com.github.kt3k.coveralls' version '2.12.2'
+    id 'com.google.osdetector' version '1.7.3'
+    id 'jvm-test-suite'
 }

 allprojects {

@@ -12,24 +13,28 @@ allprojects {

 subprojects {
     group = 'io.spiffe'
-    version = '0.7.0'
+    version = project.version

     ext {
-        grpcVersion = '1.40.1'
-        jupiterVersion = '5.7.0'
-        mockitoVersion = '3.5.15'
-        lombokVersion = '1.18.20'
-        nimbusVersion = '9.13'
+        grpcVersion = '1.73.0'
+        jupiterVersion = '5.13.2'
+        mockitoVersion = '4.11.0'
+        lombokVersion = '1.18.38'
+        nimbusVersion = '10.3.1'
+        shadowVersion = '8.1.1'
+
+        //IMPORTANT: This must be in sync with the shaded netty version in gRPC
+        nettyVersion = '4.2.2.Final'
     }

     apply plugin: 'java-library'
     apply plugin: 'maven-publish'
     apply plugin: 'signing'

-    sourceCompatibility = JavaVersion.VERSION_1_8
-    targetCompatibility = JavaVersion.VERSION_1_8
+    java {
+        sourceCompatibility = JavaVersion.VERSION_1_8
+        targetCompatibility = JavaVersion.VERSION_1_8
+
+        withJavadocJar()
+        withSourcesJar()
+    }

@@ -43,8 +48,8 @@ subprojects {
     repositories {
         maven {
             credentials {
-                username = project.properties["mavenDeployUser"]
-                password = project.properties["mavenDeployPassword"]
+                username = project.properties["mavenDeployUser"] ?: System.getenv("NEXUS_USERNAME")
+                password = project.properties["mavenDeployPassword"] ?: System.getenv("NEXUS_TOKEN")
             }
             url = project.properties["mavenDeployUrl"]
         }

@@ -77,12 +82,15 @@ subprojects {
                     url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                 }
             }

             developers {
-                developer {
-                    id = 'maxlambrecht'
-                    name = 'Max Lambrecht'
-                    email = 'maxlambrecht@gmail.com'
+                ['maxlambrecht:Max Lambrecht', 'rturner3:Ryan Turner'].each { devData ->
+                    developer {
+                        def devInfo = devData.split(':')
+                        id = devInfo[0]
+                        name = devInfo[1]
+                        url = 'https://github.com/' + devInfo[0]
+                        roles = ["Maintainer"]
+                    }
                 }
             }

@@ -91,12 +99,13 @@ subprojects {
     }

     signing {
+        useInMemoryPgpKeys(System.getenv('PGP_PRIVATE_KEY'), System.getenv('PGP_KEY_PASSPHRASE'))
         sign publishing.publications.mavenJava
     }

     dependencies {
-        implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.11'
-        implementation group: 'commons-validator', name: 'commons-validator', version: "1.7"
+        implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.17.0'
+        implementation group: 'commons-validator', name: 'commons-validator', version: "1.9.0"

         testCompileOnly group: 'org.junit.jupiter', name: 'junit-jupiter-api', version: "${jupiterVersion}"
         testRuntimeOnly group: 'org.junit.jupiter', name: 'junit-jupiter-engine', version: "${jupiterVersion}"

@@ -105,6 +114,12 @@ subprojects {
         testCompileOnly group: 'org.mockito', name: 'mockito-core', version: "${mockitoVersion}"
         testRuntimeOnly group: 'org.mockito', name: 'mockito-junit-jupiter', version: "${mockitoVersion}"

+        if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
+            testImplementation group: 'uk.org.webcompere', name: 'system-stubs-core', version: '2.0.3' // Last version supporting Java 8
+        } else {
+            testImplementation group: 'uk.org.webcompere', name: 'system-stubs-core', version: '2.1.8'
+        }
+
         // Project Lombok dependency
         compileOnly group: 'org.projectlombok', name: 'lombok', version: "${lombokVersion}"
         annotationProcessor group: 'org.projectlombok', name: 'lombok', version: "${lombokVersion}"

@@ -112,14 +127,10 @@ subprojects {
         testAnnotationProcessor group: 'org.projectlombok', name: 'lombok', version: "${lombokVersion}"
     }

-    test {
-        useJUnitPlatform()
-
-        testLogging {
-            afterSuite { desc, result ->
-                if (!desc.parent) {
-                    println "Results: ${result.resultType} (${result.testCount} tests, ${result.successfulTestCount} successes, ${result.failedTestCount} failures, ${result.skippedTestCount} skipped)"
-                }
+    testing {
+        suites {
+            test {
+                useJUnitJupiter()
             }
         }
     }

@@ -142,8 +153,14 @@ task jacocoTestReport(type: JacocoReport) {
     }

     reports {
-        xml.enabled true
-        html.enabled true
+        xml.required = true
+        html.required = true
     }
 }

+jacocoTestReport.dependsOn {
+    subprojects.collectMany { project ->
+        project.tasks.matching { it.name in ['test'] }
+    }
+}
+

@@ -154,12 +171,6 @@ coveralls {
                      'java-spiffe-provider/src/main/java']
 }

-// always run the tests before generating the report
-jacocoTestReport.dependsOn {
-    subprojects*.test
-    copyJars // workaround to prevent deleting the build folder before generating the reports
-}
-
 // copy submodules jars to a common folder for deploy
 task copyJars(type: Copy) {
     duplicatesStrategy = DuplicatesStrategy.INCLUDE

@@ -167,13 +178,4 @@ task copyJars(type: Copy) {
     into "$buildDir/libs"
 }

-task assemble {
-    dependsOn subprojects.assemble
-}
-
-assemble.finalizedBy copyJars
-
-task clean {
-    dependsOn subprojects.clean
-    delete "$buildDir"
-}
@@ -0,0 +1,25 @@
# Example java-spiffe-helper configuration

# KeyStore Path
keyStorePath = keystore.p12

# Password for the KeyStore
keyStorePass = REPLACE_WITH_YOUR_KEYSTORE_PASSWORD

# Password for the private key within the KeyStore
keyPass = REPLACE_WITH_YOUR_PRIVATE_KEY_PASSWORD

# Path to the TrustStore file
trustStorePath = truststore.p12

# TrustStore Password: Password for the TrustStore
trustStorePass = REPLACE_WITH_YOUR_TRUSTSTORE_PASSWORD

# KeyStore Type: 'pkcs12' (default) or 'jks'
keyStoreType = pkcs12

# Key Alias: Alias of the key within the KeyStore (Default: `spiffe`)
keyAlias = spiffe

# SPIFFE Socket Path: Path to the SPIRE Agent's public API socket
spiffeSocketPath = unix:/tmp/spire-agent/public/api.sock
@@ -0,0 +1,2 @@
version=0.8.12
mavenDeployUrl=https://oss.sonatype.org/service/local/staging/deploy/maven2
Binary file not shown.
@@ -1,6 +1,6 @@
-#Mon May 21 14:01:33 ART 2018
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
+networkTimeout=10000
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.1-all.zip
@@ -1,78 +1,126 @@ / @@ -81,7 +129,7 @@ / @@ -89,84 +137,109 @@
gradlew — the Gradle-generated POSIX wrapper start script, regenerated for Gradle 8.10.2: the shebang changes from `#!/usr/bin/env sh` to `#!/bin/sh`, the Apache 2.0 license header and the documented POSIX-shell requirements are added, the old Cygwin argument-slurping loop is replaced by POSIX-safe argument handling via `xargs`, and `DEFAULT_JVM_OPTS` is now `'"-Xmx64m" "-Xms64m"'`.
@@ -1,4 +1,20 @@ / @@ -9,19 +25,23 @@ / @@ -35,7 +55,7 @@ / @@ -45,38 +65,26 @@
gradlew.bat — the Gradle-generated Windows wrapper script, regenerated for Gradle 8.10.2: an Apache 2.0 license header is added, `DEFAULT_JVM_OPTS` is set to `"-Xmx64m" "-Xms64m"`, the Windows 9x/ME argument-slurping labels are removed in favour of passing `%*` directly, and exit handling now propagates `%ERRORLEVEL%` via `EXIT_CODE`.
@@ -4,7 +4,7 @@ buildscript {
     }

     dependencies {
-        classpath group: 'com.google.protobuf', name: 'protobuf-gradle-plugin', version: '0.8.13'
+        classpath group: 'com.google.protobuf', name: 'protobuf-gradle-plugin', version: '0.9.5'
     }
 }

@@ -31,6 +31,8 @@ sourceSets {
     }
 }

+sourcesJar.duplicatesStrategy = DuplicatesStrategy.INCLUDE
+
 configurations {
     integrationTestImplementation.extendsFrom testImplementation
     integrationTestCompile.extendsFrom testCompile

@@ -48,7 +50,7 @@ task integrationTest(type: Test) {

 protobuf {
     protoc {
-        artifact = 'com.google.protobuf:protoc:3.13.0'
+        artifact = 'com.google.protobuf:protoc:3.25.5'
     }
     plugins {
         grpc {

@@ -64,15 +66,27 @@ protobuf {

 dependencies {
     if (osdetector.os.is('osx') ) {
-        compileOnly(project('grpc-netty-macos'))
-        testImplementation(project('grpc-netty-macos'))
+        project.ext.osArch = System.getProperty("os.arch")
+        if ("x86_64" == project.ext.osArch) {
+            compileOnly(project('grpc-netty-macos'))
+            testImplementation(project('grpc-netty-macos'))
+        } else if ("aarch64" == project.ext.osArch) {
+            compileOnly(project('grpc-netty-macos-aarch64'))
+            testImplementation(project('grpc-netty-macos-aarch64'))
+        } else {
+            throw new GradleException("Architecture not supported: " + project.ext.osArch)
+        }
     } else {
         compileOnly(project('grpc-netty-linux'))
         testImplementation(project('grpc-netty-linux'))
     }

-    project.ext.osArch = System.getProperty("os.arch")
-
-
     implementation group: 'io.grpc', name: 'grpc-protobuf', version: "${grpcVersion}"
     implementation group: 'io.grpc', name: 'grpc-stub', version: "${grpcVersion}"
+    testImplementation group: 'io.grpc', name: 'grpc-inprocess', version: "${grpcVersion}"
     testImplementation group: 'io.grpc', name: 'grpc-testing', version: "${grpcVersion}"
     compileOnly group: 'org.apache.tomcat', name: 'annotations-api', version: '6.0.53' // necessary for Java 9+

@@ -81,7 +95,7 @@ dependencies {
     testFixturesImplementation group: 'com.nimbusds', name: 'nimbus-jose-jwt', version: "${nimbusVersion}"

     // using bouncy castle for generating X.509 certs for testing purposes
-    testFixturesImplementation group: 'org.bouncycastle', name: 'bcpkix-jdk15on', version: '1.66'
-    testFixturesImplementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.11'
+    testFixturesImplementation group: 'org.bouncycastle', name: 'bcpkix-jdk15on', version: '1.70'
+    testFixturesImplementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.17.0'
 }
@@ -0,0 +1,12 @@
description = "Java SPIFFE Library GRPC-Netty MacOS module"

dependencies {
    implementation group: 'io.grpc', name: 'grpc-netty', version: "${grpcVersion}"

    // version must match the one in grpc-netty
    implementation group: 'io.netty', name: 'netty-transport-native-kqueue', version: "${nettyVersion}", classifier: 'osx-aarch_64'
}

jar {
    archiveClassifier = ""
}
@ -0,0 +1,81 @@
|
|||
package io.spiffe.workloadapi.internal;
|
||||
|
||||
import io.grpc.ManagedChannel;
|
||||
import io.grpc.netty.NegotiationType;
|
||||
import io.grpc.netty.NettyChannelBuilder;
|
||||
import io.netty.channel.ChannelOption;
|
||||
import io.netty.channel.EventLoopGroup;
|
||||
import io.netty.channel.kqueue.KQueueDomainSocketChannel;
|
||||
import io.netty.channel.kqueue.KQueueEventLoopGroup;
|
||||
import io.netty.channel.unix.DomainSocketAddress;
|
||||
import lombok.NonNull;
|
||||
import lombok.val;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
|
||||
/**
|
||||
* Factory for creating ManagedChannel instances for Mac OS.
|
||||
*/
|
||||
public final class GrpcManagedChannelFactory {
|
||||
|
||||
private static final String UNIX_SCHEME = "unix";
|
||||
private static final String TCP_SCHEME = "tcp";
|
||||
|
||||
private GrpcManagedChannelFactory() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a ManagedChannelWrapper that contains a {@link ManagedChannel} to the SPIFFE Socket Endpoint provided.
|
||||
*
|
||||
* @param address URI representing the Workload API endpoint.
|
||||
* @param executorService the executor to configure the event loop group
|
||||
* @return a instance of a {@link ManagedChannelWrapper}
|
||||
*/
|
||||
public static ManagedChannelWrapper newChannel(@NonNull URI address, ExecutorService executorService) {
|
||||
val scheme = address.getScheme();
|
||||
ManagedChannelWrapper result;
|
||||
switch (scheme) {
|
||||
case UNIX_SCHEME:
|
||||
result = createNativeSocketChannel(address, executorService);
|
||||
break;
|
||||
case TCP_SCHEME:
|
||||
result = createTcpChannel(address);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Address Scheme not supported: ");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Create a Native Socket Channel pointing to the spiffeSocketPath
|
||||
private static ManagedChannelWrapper createNativeSocketChannel(@NonNull URI address, ExecutorService executorService) {
|
||||
NettyChannelBuilder channelBuilder = NettyChannelBuilder.
|
||||
forAddress(new DomainSocketAddress(address.getPath()));
|
||||
EventLoopGroup eventLoopGroup = configureNativeSocketChannel(channelBuilder, executorService);
|
||||
ManagedChannel managedChannel = channelBuilder.usePlaintext().build();
|
||||
return new ManagedChannelWrapper(managedChannel, eventLoopGroup);
|
||||
}
|
||||
|
||||
private static ManagedChannelWrapper createTcpChannel(@NonNull URI address) {
|
||||
ManagedChannel managedChannel = NettyChannelBuilder.forAddress(address.getHost(), address.getPort())
|
||||
.negotiationType(NegotiationType.PLAINTEXT)
|
||||
.build();
|
||||
return new ManagedChannelWrapper(managedChannel);
|
||||
}
|
||||
|
||||
private static EventLoopGroup configureNativeSocketChannel(@NonNull NettyChannelBuilder channelBuilder, ExecutorService executorService) {
|
||||
if (SystemUtils.IS_OS_MAC) {
|
||||
// nThreads = 0 -> use Netty default
|
||||
KQueueEventLoopGroup eventLoopGroup = new KQueueEventLoopGroup(0, executorService);
|
||||
channelBuilder.eventLoopGroup(eventLoopGroup)
|
||||
// avoid warning Unknown channel option 'SO_KEEPALIVE'
|
||||
.withOption(ChannelOption.SO_KEEPALIVE, null)
|
||||
.channelType(KQueueDomainSocketChannel.class);
|
||||
return eventLoopGroup;
|
||||
}
|
||||
|
||||
throw new IllegalStateException("Operating System is not supported.");
|
||||
}
|
||||
}
|
|
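A minimal usage sketch for the factory above; the socket path and executor are illustrative, and this is an internal API, shown only to illustrate how the wrapper and channel fit together:

// Sketch: assumes a Workload API listening on a Unix domain socket on macOS (kqueue transport)
URI address = URI.create("unix:///tmp/agent.sock");
ExecutorService executor = Executors.newCachedThreadPool();
try (ManagedChannelWrapper wrapper = GrpcManagedChannelFactory.newChannel(address, executor)) {
    ManagedChannel channel = wrapper.getChannel();
    // build the Workload API gRPC stubs against 'channel' here
}
executor.shutdown();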
@ -0,0 +1,51 @@
package io.spiffe.workloadapi.internal;

import io.grpc.ManagedChannel;
import io.netty.channel.EventLoopGroup;

import java.io.Closeable;

/**
 * Wraps a {@link ManagedChannel} along with its {@link EventLoopGroup} so that the channel
 * can be shut down properly: closing the wrapper calls shutdownGracefully on the
 * EventLoopGroup, preventing threads from remaining active, before shutting down the channel.
 */
public class ManagedChannelWrapper implements Closeable {

    private final ManagedChannel managedChannel;
    private final EventLoopGroup eventLoopGroup;

    /**
     * Constructor.
     *
     * @param managedChannel an instance of {@link ManagedChannel}
     * @param eventLoopGroup an instance of {@link EventLoopGroup}
     */
    public ManagedChannelWrapper(ManagedChannel managedChannel, EventLoopGroup eventLoopGroup) {
        this.managedChannel = managedChannel;
        this.eventLoopGroup = eventLoopGroup;
    }

    /**
     * Constructor.
     *
     * @param managedChannel a {@link ManagedChannel}
     */
    public ManagedChannelWrapper(ManagedChannel managedChannel) {
        this.managedChannel = managedChannel;
        this.eventLoopGroup = null;
    }

    @Override
    public void close() {
        if (eventLoopGroup != null) {
            eventLoopGroup.shutdownGracefully();
        }
        managedChannel.shutdown();
    }

    public ManagedChannel getChannel() {
        return managedChannel;
    }
}
@ -2,7 +2,7 @@ description = "Java SPIFFE Library GRPC-Netty MacOS module"

dependencies {
    implementation group: 'io.grpc', name: 'grpc-netty', version: "${grpcVersion}"
    implementation group: 'io.netty', name: 'netty-transport-native-kqueue', version: '4.1.63.Final', classifier: 'osx-x86_64'
    implementation group: 'io.netty', name: 'netty-transport-native-kqueue', version: "${nettyVersion}", classifier: 'osx-x86_64'
}

jar {
@ -229,8 +229,8 @@ public class CertificateUtils {
    // Given a private key in PEM format, encode it as DER
    private static byte[] toDerFormat(final byte[] privateKeyPem) throws InvalidKeyException {
        String privateKeyAsString = new String(privateKeyPem);
        privateKeyAsString = privateKeyAsString.replaceAll("(-+BEGIN PRIVATE KEY-+\\r?\\n|-+END PRIVATE KEY-+\\r?\\n?)", "");
        privateKeyAsString = privateKeyAsString.replaceAll("\n", "");
        privateKeyAsString = privateKeyAsString.replaceAll("(-+BEGIN PRIVATE KEY-+|-+END PRIVATE KEY-+)", "");
        privateKeyAsString = privateKeyAsString.replaceAll("\r?\n", "");
        val decoder = Base64.getDecoder();
        try {
            return decoder.decode(privateKeyAsString);
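The reason for the looser pattern above: the old regex only stripped LF-terminated header and footer lines, so a PEM written with CRLF line endings kept stray carriage returns and the Base64 decode failed. A small illustrative check (the key body below is a placeholder, not real key material):

// Hypothetical CRLF-terminated PEM body
String pem = "-----BEGIN PRIVATE KEY-----\r\nMIIBVAIBADANBg==\r\n-----END PRIVATE KEY-----\r\n";
String cleaned = pem
        .replaceAll("(-+BEGIN PRIVATE KEY-+|-+END PRIVATE KEY-+)", "")
        .replaceAll("\r?\n", "");
byte[] der = java.util.Base64.getDecoder().decode(cleaned); // no IllegalArgumentException from a stray '\r'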
@ -21,7 +21,7 @@ public final class SpiffeIdUtils {

    private static final char DEFAULT_CHAR_SEPARATOR = '|';

    private static final Set<Character> SUPPORTED_SEPARATORS = Sets.newHashSet(DEFAULT_CHAR_SEPARATOR, ' ');
    private static final Set<Character> SUPPORTED_SEPARATORS = Sets.newHashSet(DEFAULT_CHAR_SEPARATOR, ' ', ',');

    private SpiffeIdUtils() {
        throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
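With ',' added to the supported separators, a comma-delimited list of SPIFFE IDs can be parsed directly; a small sketch mirroring the updated test further down in this change:

Set<SpiffeId> ids = SpiffeIdUtils.toSetOfSpiffeIds(
        "spiffe://example.org/workload1,spiffe://example.org/workload2", ',');
// ids now contains both SpiffeId instances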
@ -64,19 +66,35 @@ public class JwtSvid {
     */
    String token;

    /**
     * Issued at time of JWT-SVID as present in 'iat' claim.
     */
    Date issuedAt;

    /**
     * Hint is an operator-specified string used to provide guidance on how this
     * identity should be used by a workload when more than one SVID is returned.
     */
    String hint;

    public static final String HEADER_TYP_JWT = "JWT";
    public static final String HEADER_TYP_JOSE = "JOSE";

    private JwtSvid(final SpiffeId spiffeId,
                    final Set<String> audience,
                    final Date issuedAt,
                    final Date expiry,
                    final Map<String, Object> claims,
                    final String token) {
                    final String token,
                    final String hint
    ) {
        this.spiffeId = spiffeId;
        this.audience = audience;
        this.expiry = expiry;
        this.claims = claims;
        this.token = token;
        this.issuedAt = issuedAt;
        this.hint = hint;
    }

    /**
@ -108,6 +124,41 @@ public class JwtSvid {
|
|||
@NonNull final Set<String> audience)
|
||||
throws JwtSvidException, BundleNotFoundException, AuthorityNotFoundException {
|
||||
|
||||
return parseAndValidate(token, jwtBundleSource, audience, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and validates a JWT-SVID token and returns an instance of {@link JwtSvid}.
|
||||
* <p>
|
||||
* The JWT-SVID signature is verified using the JWT bundle source.
|
||||
*
|
||||
* @param token a token as a string that is parsed and validated
|
||||
* @param jwtBundleSource an implementation of a {@link BundleSource} that provides the JWT authorities to
|
||||
* verify the signature
|
||||
* @param audience audience as a list of strings used to validate the 'aud' claim
|
||||
* @param hint a hint that can be used to provide guidance on how this identity should be used
|
||||
* @return an instance of a {@link JwtSvid} with a SPIFFE ID parsed from the 'sub', audience from 'aud', and expiry
|
||||
* from 'exp' claim.
|
||||
* @throws JwtSvidException when the token expired or the expiration claim is missing,
|
||||
* when the algorithm is not supported (See {@link JwtSignatureAlgorithm}),
|
||||
* when the header 'kid' is missing,
|
||||
* when the header 'typ' is present and is not 'JWT' or 'JOSE'
|
||||
* when the signature cannot be verified,
|
||||
* when the 'aud' claim has an audience that is not in the audience list
|
||||
* provided as parameter
|
||||
* @throws IllegalArgumentException when the token is blank or cannot be parsed
|
||||
* @throws BundleNotFoundException if the bundle for the trust domain of the spiffe id from the 'sub'
|
||||
* cannot be found in the JwtBundleSource
|
||||
* @throws AuthorityNotFoundException if the authority cannot be found in the bundle using the value from
|
||||
* the 'kid' header
|
||||
*/
|
||||
public static JwtSvid parseAndValidate(@NonNull final String token,
|
||||
@NonNull final BundleSource<JwtBundle> jwtBundleSource,
|
||||
@NonNull final Set<String> audience,
|
||||
final String hint
|
||||
)
|
||||
throws JwtSvidException, BundleNotFoundException, AuthorityNotFoundException {
|
||||
|
||||
if (StringUtils.isBlank(token)) {
|
||||
throw new IllegalArgumentException("Token cannot be blank");
|
||||
}
|
||||
|
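A usage sketch of the new four-argument overload; the token, bundle source, and hint value are illustrative, and checked-exception handling is omitted:

Set<String> audience = Collections.singleton("audience1");
JwtSvid svid = JwtSvid.parseAndValidate(token, jwtBundleSource, audience, "external");
// the hint is carried through to the parsed SVID
String hint = svid.getHint(); // "external"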
@ -120,6 +171,8 @@ public class JwtSvid {
|
|||
val claimsSet = getJwtClaimsSet(signedJwt);
|
||||
validateAudience(claimsSet.getAudience(), audience);
|
||||
|
||||
val issuedAt = claimsSet.getIssueTime();
|
||||
|
||||
val expirationTime = claimsSet.getExpirationTime();
|
||||
validateExpiration(expirationTime);
|
||||
|
||||
|
@ -132,7 +185,8 @@ public class JwtSvid {
|
|||
verifySignature(signedJwt, jwtAuthority, algorithm, keyId);
|
||||
|
||||
val claimAudience = new HashSet<>(claimsSet.getAudience());
|
||||
return new JwtSvid(spiffeId, claimAudience, expirationTime, claimsSet.getClaims(), token);
|
||||
|
||||
return new JwtSvid(spiffeId, claimAudience, issuedAt, expirationTime, claimsSet.getClaims(), token, hint);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -151,6 +205,26 @@ public class JwtSvid {
|
|||
* @throws IllegalArgumentException when the token cannot be parsed
|
||||
*/
|
||||
public static JwtSvid parseInsecure(@NonNull final String token, @NonNull final Set<String> audience) throws JwtSvidException {
|
||||
return parseInsecure(token, audience, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and validates a JWT-SVID token and returns an instance of a {@link JwtSvid}.
|
||||
* <p>
|
||||
* The JWT-SVID signature is not verified.
|
||||
*
|
||||
* @param token a token as a string that is parsed and validated
|
||||
* @param audience audience as a list of strings used to validate the 'aud'
|
||||
* @param hint a hint that can be used to provide guidance on how this identity should be used
|
||||
* @return an instance of a {@link JwtSvid} with a SPIFFE ID parsed from the 'sub', audience from 'aud', and expiry
|
||||
* from 'exp' claim.
|
||||
* @throws JwtSvidException when the token expired or the expiration claim is missing,
|
||||
* when the 'aud' has an audience that is not in the audience provided as parameter,
|
||||
* when the 'alg' is not supported (See {@link JwtSignatureAlgorithm}),
|
||||
* when the header 'typ' is present and is not 'JWT' or 'JOSE'.
|
||||
* @throws IllegalArgumentException when the token cannot be parsed
|
||||
*/
|
||||
public static JwtSvid parseInsecure(@NonNull final String token, @NonNull final Set<String> audience, final String hint) throws JwtSvidException {
|
||||
if (StringUtils.isBlank(token)) {
|
||||
throw new IllegalArgumentException("Token cannot be blank");
|
||||
}
|
||||
|
@ -163,13 +237,16 @@ public class JwtSvid {
|
|||
val claimsSet = getJwtClaimsSet(signedJwt);
|
||||
validateAudience(claimsSet.getAudience(), audience);
|
||||
|
||||
val issuedAt = claimsSet.getIssueTime();
|
||||
|
||||
val expirationTime = claimsSet.getExpirationTime();
|
||||
validateExpiration(expirationTime);
|
||||
|
||||
val spiffeId = getSpiffeIdOfSubject(claimsSet);
|
||||
|
||||
val claimAudience = new HashSet<>(claimsSet.getAudience());
|
||||
return new JwtSvid(spiffeId, claimAudience, expirationTime, claimsSet.getClaims(), token);
|
||||
|
||||
return new JwtSvid(spiffeId, claimAudience, issuedAt, expirationTime, claimsSet.getClaims(), token, hint);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -192,6 +269,16 @@ public class JwtSvid {
|
|||
return new Date(expiry.getTime());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the SVID hint.
|
||||
*
|
||||
* @return the SVID hint
|
||||
*/
|
||||
public String getHint() {
|
||||
return hint;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the map of claims.
|
||||
*
|
||||
|
|
@ -2,6 +2,9 @@ package io.spiffe.svid.jwtsvid;

import io.spiffe.exception.JwtSvidException;
import io.spiffe.spiffeid.SpiffeId;
import lombok.NonNull;

import java.util.List;

/**
 * Represents a source of SPIFFE JWT-SVIDs.

@ -28,4 +31,25 @@ public interface JwtSvidSource {
     * @throws JwtSvidException when there is an error fetching the JWT SVID
     */
    JwtSvid fetchJwtSvid(SpiffeId subject, String audience, String... extraAudiences) throws JwtSvidException;

    /**
     * Fetches all SPIFFE JWT-SVIDs in a one-shot blocking call.
     *
     * @param audience      the audience of the JWT-SVID
     * @param extraAudience the extra audiences for the JWT-SVID
     * @return a list of all fetched {@link JwtSvid} objects
     * @throws JwtSvidException if there is an error fetching or processing the JWT from the Workload API
     */
    List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException;

    /**
     * Fetches all SPIFFE JWT-SVIDs for the given subject in a one-shot blocking call.
     *
     * @param subject       a SPIFFE ID
     * @param audience      the audience of the JWT-SVID
     * @param extraAudience the extra audiences for the JWT-SVID
     * @return a list of all fetched {@link JwtSvid} objects
     * @throws JwtSvidException if there is an error fetching or processing the JWT from the Workload API
     */
    List<JwtSvid> fetchJwtSvids(@NonNull SpiffeId subject, @NonNull String audience, String... extraAudience) throws JwtSvidException;
}
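A sketch of how a caller might use the new list-returning method on any JwtSvidSource implementation; the source instance and audience values are illustrative, and checked-exception handling is omitted:

List<JwtSvid> svids = jwtSvidSource.fetchJwtSvids("audience1", "audience2");
for (JwtSvid svid : svids) {
    // the hint indicates which SVID to use when more than one is returned
    System.out.println(svid.getSpiffeId() + " hint=" + svid.getHint());
}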
@ -41,13 +41,23 @@ public class X509Svid {
|
|||
|
||||
PrivateKey privateKey;
|
||||
|
||||
/**
|
||||
* Hint is an operator-specified string used to provide guidance on how this
|
||||
* identity should be used by a workload when more than one SVID is returned.
|
||||
*/
|
||||
String hint;
|
||||
|
||||
|
||||
private X509Svid(
|
||||
final SpiffeId spiffeId,
|
||||
final List<X509Certificate> chain,
|
||||
final PrivateKey privateKey) {
|
||||
final PrivateKey privateKey,
|
||||
final String hint
|
||||
) {
|
||||
this.spiffeId = spiffeId;
|
||||
this.chain = chain;
|
||||
this.privateKey = privateKey;
|
||||
this.hint = hint;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -59,6 +69,16 @@ public class X509Svid {
|
|||
return chain.get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the SVID hint.
|
||||
*
|
||||
* @return the SVID hint
|
||||
*/
|
||||
public String getHint() {
|
||||
return hint;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the chain of X.509 certificates.
|
||||
*
|
||||
|
@ -93,7 +113,7 @@ public class X509Svid {
|
|||
} catch (IOException e) {
|
||||
throw new X509SvidException("Cannot read private key file", e);
|
||||
}
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.PEM);
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.PEM, null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -109,7 +129,24 @@ public class X509Svid {
|
|||
*/
|
||||
public static X509Svid parse(@NonNull final byte[] certsBytes, @NonNull final byte[] privateKeyBytes)
|
||||
throws X509SvidException {
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.PEM);
|
||||
return parse(certsBytes, privateKeyBytes, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the X.509 SVID from PEM or DER blocks containing certificate chain and key
|
||||
* bytes. The key must be a PEM block with PKCS#8.
|
||||
* <p>
|
||||
* It is assumed that the leaf certificate is always the first certificate in the parsed chain.
|
||||
*
|
||||
* @param certsBytes chain of certificates as a byte array
|
||||
* @param privateKeyBytes private key as byte array
|
||||
* @param hint a hint that can be used to provide guidance on how this identity should be used
|
||||
* @return a {@link X509Svid} parsed from the given certBytes and privateKeyBytes
|
||||
* @throws X509SvidException if the given certsBytes or privateKeyBytes cannot be parsed
|
||||
*/
|
||||
public static X509Svid parse(@NonNull final byte[] certsBytes, @NonNull final byte[] privateKeyBytes, final String hint)
|
||||
throws X509SvidException {
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.PEM, hint);
|
||||
}
|
||||
|
||||
/**
|
||||
|
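A short sketch of the new parse overload; the PEM byte arrays are assumed to be loaded by the application, and checked-exception handling is omitted:

X509Svid svid = X509Svid.parse(certPemBytes, keyPemBytes, "internal");
// the hint is exposed on the parsed SVID
String hint = svid.getHint(); // "internal"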
@ -125,7 +162,25 @@ public class X509Svid {
|
|||
*/
|
||||
public static X509Svid parseRaw(@NonNull final byte[] certsBytes,
|
||||
@NonNull final byte[] privateKeyBytes) throws X509SvidException {
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.DER);
|
||||
return parseRaw(certsBytes, privateKeyBytes, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the X509-SVID from certificate and key bytes. The certificate must be ASN.1 DER (concatenated with
|
||||
* no intermediate padding if there is more than one certificate). The key must be a PKCS#8 ASN.1 DER.
|
||||
* <p>
|
||||
* It is assumed that the leaf certificate is always the first certificate in the parsed chain.
|
||||
*
|
||||
* @param certsBytes chain of certificates as a byte array
|
||||
* @param privateKeyBytes private key as byte array
|
||||
* @param hint a hint that can be used to provide guidance on how this identity should be used
|
||||
* @return a {@link X509Svid} parsed from the given certBytes and privateKeyBytes
|
||||
* @throws X509SvidException if the given certsBytes or privateKeyBytes cannot be parsed
|
||||
*/
|
||||
public static X509Svid parseRaw(@NonNull final byte[] certsBytes,
|
||||
@NonNull final byte[] privateKeyBytes,
|
||||
final String hint) throws X509SvidException {
|
||||
return createX509Svid(certsBytes, privateKeyBytes, KeyFileFormat.DER, hint);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -139,7 +194,8 @@ public class X509Svid {
|
|||
|
||||
private static X509Svid createX509Svid(final byte[] certsBytes,
|
||||
final byte[] privateKeyBytes,
|
||||
final KeyFileFormat keyFileFormat) throws X509SvidException {
|
||||
final KeyFileFormat keyFileFormat,
|
||||
final String hint) throws X509SvidException {
|
||||
|
||||
val x509Certificates = generateX509Certificates(certsBytes);
|
||||
val privateKey = generatePrivateKey(privateKeyBytes, keyFileFormat, x509Certificates);
|
||||
|
@ -152,7 +208,7 @@ public class X509Svid {
|
|||
validateSigningCertificates(x509Certificates);
|
||||
}
|
||||
|
||||
return new X509Svid(spiffeId, x509Certificates, privateKey);
|
||||
return new X509Svid(spiffeId, x509Certificates, privateKey, hint);
|
||||
}
|
||||
|
||||
private static SpiffeId getSpiffeId(final List<X509Certificate> x509Certificates) throws X509SvidException {
|
||||
|
|
|
@ -0,0 +1,338 @@
|
|||
package io.spiffe.workloadapi;
|
||||
|
||||
|
||||
import io.spiffe.bundle.jwtbundle.JwtBundle;
|
||||
import io.spiffe.bundle.jwtbundle.JwtBundleSet;
|
||||
import io.spiffe.bundle.x509bundle.X509Bundle;
|
||||
import io.spiffe.exception.*;
|
||||
import io.spiffe.spiffeid.SpiffeId;
|
||||
import io.spiffe.spiffeid.TrustDomain;
|
||||
import io.spiffe.svid.jwtsvid.JwtSvid;
|
||||
import lombok.NonNull;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.java.Log;
|
||||
import lombok.val;
|
||||
import org.apache.commons.lang3.tuple.ImmutablePair;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.time.Clock;
|
||||
import java.time.Duration;
|
||||
import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import static io.spiffe.workloadapi.internal.ThreadUtils.await;
|
||||
|
||||
/**
|
||||
* Represents a source of SPIFFE JWT SVIDs and JWT bundles maintained via the Workload API.
|
||||
 * The JWT SVIDs are cached; the fetchJwtSvid methods return them from the cache
 * as long as the cached JWT SVID still has at least half of its lifetime remaining.
|
||||
*/
|
||||
@Log
|
||||
public class CachedJwtSource implements JwtSource {
|
||||
static final String TIMEOUT_SYSTEM_PROPERTY = "spiffe.newJwtSource.timeout";
|
||||
|
||||
static final Duration DEFAULT_TIMEOUT =
|
||||
Duration.parse(System.getProperty(TIMEOUT_SYSTEM_PROPERTY, "PT0S"));
|
||||
|
||||
// Synchronized map of JWT SVIDs, keyed by a pair of SPIFFE ID and a Set of audiences strings.
|
||||
// This map is used to cache the JWT SVIDs and avoid fetching them from the Workload API.
|
||||
private final
|
||||
Map<ImmutablePair<SpiffeId, Set<String>>, List<JwtSvid>> jwtSvids = new ConcurrentHashMap<>();
|
||||
|
||||
private JwtBundleSet bundles;
|
||||
|
||||
private final WorkloadApiClient workloadApiClient;
|
||||
private volatile boolean closed;
|
||||
private Clock clock;
|
||||
|
||||
// private constructor
|
||||
private CachedJwtSource(final WorkloadApiClient workloadApiClient) {
|
||||
this.clock = Clock.systemDefaultZone();
|
||||
this.workloadApiClient = workloadApiClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Cached JWT source. It blocks until the initial update with the JWT bundles
|
||||
* has been received from the Workload API or until the timeout configured
|
||||
* through the system property `spiffe.newJwtSource.timeout` expires.
|
||||
* If no timeout is configured, it blocks until it gets a JWT update from the Workload API.
|
||||
* <p>
|
||||
* It uses the default address socket endpoint from the environment variable to get the Workload API address.
|
||||
*
|
||||
 * @return an instance of {@link CachedJwtSource}, with the JWT bundles initialized
|
||||
* @throws SocketEndpointAddressException if the address to the Workload API is not valid
|
||||
* @throws JwtSourceException if the source could not be initialized
|
||||
*/
|
||||
public static JwtSource newSource() throws JwtSourceException, SocketEndpointAddressException {
|
||||
JwtSourceOptions options = JwtSourceOptions.builder().initTimeout(DEFAULT_TIMEOUT).build();
|
||||
return newSource(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new JWT source. It blocks until the initial update with the JWT bundles
|
||||
* has been received from the Workload API, doing retries with an exponential backoff policy,
|
||||
* or until the initTimeout has expired.
|
||||
* <p>
|
||||
* If the timeout is not provided in the options, the default timeout is read from the
|
||||
* system property `spiffe.newJwtSource.timeout`. If none is configured, this method will
|
||||
* block until the JWT bundles can be retrieved from the Workload API.
|
||||
* <p>
|
||||
* The {@link WorkloadApiClient} can be provided in the options, if it is not,
|
||||
* a new client is created.
|
||||
*
|
||||
* @param options {@link JwtSourceOptions}
|
||||
* @return an instance of {@link CachedJwtSource}, with the JWT bundles initialized
|
||||
* @throws SocketEndpointAddressException if the address to the Workload API is not valid
|
||||
* @throws JwtSourceException if the source could not be initialized
|
||||
*/
|
||||
public static JwtSource newSource(@NonNull final JwtSourceOptions options)
|
||||
throws SocketEndpointAddressException, JwtSourceException {
|
||||
if (options.getWorkloadApiClient() == null) {
|
||||
options.setWorkloadApiClient(createClient(options));
|
||||
}
|
||||
|
||||
if (options.getInitTimeout() == null) {
|
||||
options.setInitTimeout(DEFAULT_TIMEOUT);
|
||||
}
|
||||
|
||||
CachedJwtSource jwtSource = new CachedJwtSource(options.getWorkloadApiClient());
|
||||
|
||||
try {
|
||||
jwtSource.init(options.getInitTimeout());
|
||||
} catch (Exception e) {
|
||||
jwtSource.close();
|
||||
throw new JwtSourceException("Error creating JWT source", e);
|
||||
}
|
||||
|
||||
return jwtSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a JWT SVID for the given audiences. The JWT SVID is cached and
|
||||
* returned from the cache if it still has at least half of its lifetime.
|
||||
*
|
||||
* @param audience the audience
|
||||
* @param extraAudiences a list of extra audiences as an array of String
|
||||
* @return a {@link JwtSvid}
|
||||
 * @throws JwtSvidException if there is an error fetching or processing the JWT from the Workload API
|
||||
*/
|
||||
@Override
|
||||
public JwtSvid fetchJwtSvid(final String audience, final String... extraAudiences) throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
|
||||
return getJwtSvids(audience, extraAudiences).get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a JWT SVID for the given subject and audience. The JWT SVID is cached and
|
||||
 * returned from the cache if it still has at least half of its lifetime.
|
||||
*
|
||||
* @return a {@link JwtSvid}
|
||||
* @throws IllegalStateException if the source is closed
|
||||
*/
|
||||
@Override
|
||||
public JwtSvid fetchJwtSvid(final SpiffeId subject, final String audience, final String... extraAudiences)
|
||||
throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
|
||||
return getJwtSvids(subject, audience, extraAudiences).get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a list of JWT SVIDs for the given audience. The JWT SVIDs are cached and
|
||||
 * returned from the cache if they still have at least half of their lifetime.
|
||||
*
|
||||
* @return a list of {@link JwtSvid}s
|
||||
* @throws IllegalStateException if the source is closed
|
||||
*/
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(final String audience, final String... extraAudiences) throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
|
||||
return getJwtSvids(audience, extraAudiences);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a list of JWT SVIDs for the given subject and audience. The JWT SVIDs are cached and
|
||||
 * returned from the cache if they still have at least half of their lifetime.
|
||||
*
|
||||
* @return a list of {@link JwtSvid}s
|
||||
* @throws IllegalStateException if the source is closed
|
||||
*/
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(final SpiffeId subject, final String audience, final String... extraAudiences)
|
||||
throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
|
||||
return getJwtSvids(subject, audience, extraAudiences);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the JWT bundle for a given trust domain.
|
||||
*
|
||||
     * @return an instance of a {@link JwtBundle}
     * @throws BundleNotFoundException if there is no bundle for the trust domain provided
|
||||
* @throws IllegalStateException if the source is closed
|
||||
*/
|
||||
@Override
|
||||
public JwtBundle getBundleForTrustDomain(@NonNull final TrustDomain trustDomain) throws BundleNotFoundException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT bundle source is closed");
|
||||
}
|
||||
return bundles.getBundleForTrustDomain(trustDomain);
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes this source, dropping the connection to the Workload API.
|
||||
* Other source methods will return an error after close has been called.
|
||||
* <p>
|
||||
* It is marked with {@link SneakyThrows} because it is not expected to throw
|
||||
* the checked exception defined on the {@link Closeable} interface.
|
||||
*/
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public void close() {
|
||||
if (!closed) {
|
||||
synchronized (this) {
|
||||
if (!closed) {
|
||||
workloadApiClient.close();
|
||||
closed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    // Check if the jwtSvids map contains the cacheKey; return the cached list if it exists and the JWT SVID has not passed half of its lifetime.
    // If the cache does not contain the key, or the JWT SVID has passed half of its lifetime, make a new FetchJWTSVID call to the Workload API,
    // add the JWT SVIDs to the cache map, and return them.
|
||||
// Only one thread can fetch new JWT SVIDs and update the cache at a time.
|
||||
private List<JwtSvid> getJwtSvids(SpiffeId subject, String audience, String... extraAudiences) throws JwtSvidException {
|
||||
Set<String> audiencesSet = getAudienceSet(audience, extraAudiences);
|
||||
ImmutablePair<SpiffeId, Set<String>> cacheKey = new ImmutablePair<>(subject, audiencesSet);
|
||||
|
||||
List<JwtSvid> svidList = jwtSvids.get(cacheKey);
|
||||
if (svidList != null && !isTokenPastHalfLifetime(svidList.get(0))) {
|
||||
return svidList;
|
||||
}
|
||||
|
||||
// even using ConcurrentHashMap, there is a possibility of multiple threads trying to fetch new JWT SVIDs at the same time.
|
||||
synchronized (this) {
|
||||
// Check again if the jwtSvids map contains the cacheKey, and return the entry if it exists and the JWT SVID has not passed its half lifetime.
|
||||
// If it does not exist or the JWT-SVID has passed half its lifetime, call the Workload API to fetch new JWT-SVIDs,
|
||||
// add them to the cache map, and return the list of JWT-SVIDs.
|
||||
svidList = jwtSvids.get(cacheKey);
|
||||
if (svidList != null && !isTokenPastHalfLifetime(svidList.get(0))) {
|
||||
return svidList;
|
||||
}
|
||||
|
||||
if (cacheKey.left == null) {
|
||||
svidList = workloadApiClient.fetchJwtSvids(audience, extraAudiences);
|
||||
} else {
|
||||
svidList = workloadApiClient.fetchJwtSvids(cacheKey.left, audience, extraAudiences);
|
||||
}
|
||||
jwtSvids.put(cacheKey, svidList);
|
||||
return svidList;
|
||||
}
|
||||
}
|
||||
|
||||
private List<JwtSvid> getJwtSvids(String audience, String... extraAudiences) throws JwtSvidException {
|
||||
return getJwtSvids(null, audience, extraAudiences);
|
||||
}
|
||||
|
||||
private static Set<String> getAudienceSet(String audience, String[] extraAudiences) {
|
||||
Set<String> audiencesString;
|
||||
if (extraAudiences != null && extraAudiences.length > 0) {
|
||||
audiencesString = new HashSet<>(Arrays.asList(extraAudiences));
|
||||
audiencesString.add(audience);
|
||||
} else {
|
||||
audiencesString = Collections.singleton(audience);
|
||||
}
|
||||
return audiencesString;
|
||||
}
|
||||
|
||||
private boolean isTokenPastHalfLifetime(JwtSvid jwtSvid) {
|
||||
Instant now = clock.instant();
|
||||
val halfLife = new Date(jwtSvid.getExpiry().getTime() - (jwtSvid.getExpiry().getTime() - jwtSvid.getIssuedAt().getTime()) / 2);
|
||||
val halfLifeInstant = Instant.ofEpochMilli(halfLife.getTime());
|
||||
return now.isAfter(halfLifeInstant);
|
||||
}
|
||||
|
||||
|
||||
private void init(final Duration timeout) throws TimeoutException {
|
||||
CountDownLatch done = new CountDownLatch(1);
|
||||
setJwtBundlesWatcher(done);
|
||||
|
||||
boolean success;
|
||||
if (timeout.isZero()) {
|
||||
await(done);
|
||||
success = true;
|
||||
} else {
|
||||
success = await(done, timeout.getSeconds(), TimeUnit.SECONDS);
|
||||
}
|
||||
if (!success) {
|
||||
throw new TimeoutException("Timeout waiting for JWT bundles update");
|
||||
}
|
||||
}
|
||||
|
||||
private void setJwtBundlesWatcher(final CountDownLatch done) {
|
||||
workloadApiClient.watchJwtBundles(new Watcher<JwtBundleSet>() {
|
||||
@Override
|
||||
public void onUpdate(final JwtBundleSet update) {
|
||||
log.log(Level.INFO, "Received JwtBundleSet update");
|
||||
setJwtBundleSet(update);
|
||||
done.countDown();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(final Throwable error) {
|
||||
log.log(Level.SEVERE, "Error in JwtBundleSet watcher", error);
|
||||
done.countDown();
|
||||
throw new WatcherException("Error fetching JwtBundleSet", error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void setJwtBundleSet(final JwtBundleSet update) {
|
||||
synchronized (this) {
|
||||
this.bundles = update;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isClosed() {
|
||||
synchronized (this) {
|
||||
return closed;
|
||||
}
|
||||
}
|
||||
|
||||
private static WorkloadApiClient createClient(final JwtSourceOptions options)
|
||||
throws SocketEndpointAddressException {
|
||||
val clientOptions = DefaultWorkloadApiClient.ClientOptions
|
||||
.builder()
|
||||
.spiffeSocketPath(options.getSpiffeSocketPath())
|
||||
.build();
|
||||
return DefaultWorkloadApiClient.newClient(clientOptions);
|
||||
}
|
||||
|
||||
void setClock(Clock clock) {
|
||||
this.clock = clock;
|
||||
}
|
||||
}
|
|
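A usage sketch of the cached source; the socket path and timeout are illustrative, and exception handling is omitted:

JwtSourceOptions options = JwtSourceOptions.builder()
        .spiffeSocketPath("unix:///tmp/agent.sock")
        .initTimeout(Duration.ofSeconds(10))
        .build();
JwtSource source = CachedJwtSource.newSource(options);
// repeated calls with the same audiences are served from the cache
// while the cached JWT SVID still has at least half of its lifetime
JwtSvid svid = source.fetchJwtSvid("audience1");
source.close();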
@ -3,25 +3,18 @@ package io.spiffe.workloadapi;
|
|||
import io.spiffe.bundle.jwtbundle.JwtBundle;
|
||||
import io.spiffe.bundle.jwtbundle.JwtBundleSet;
|
||||
import io.spiffe.bundle.x509bundle.X509Bundle;
|
||||
import io.spiffe.exception.BundleNotFoundException;
|
||||
import io.spiffe.exception.JwtSourceException;
|
||||
import io.spiffe.exception.JwtSvidException;
|
||||
import io.spiffe.exception.SocketEndpointAddressException;
|
||||
import io.spiffe.exception.WatcherException;
|
||||
import io.spiffe.exception.*;
|
||||
import io.spiffe.spiffeid.SpiffeId;
|
||||
import io.spiffe.spiffeid.TrustDomain;
|
||||
import io.spiffe.svid.jwtsvid.JwtSvid;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NonNull;
|
||||
import lombok.Setter;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.java.Log;
|
||||
import lombok.val;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
@ -86,18 +79,18 @@ public class DefaultJwtSource implements JwtSource {
|
|||
*/
|
||||
public static JwtSource newSource(@NonNull final JwtSourceOptions options)
|
||||
throws SocketEndpointAddressException, JwtSourceException {
|
||||
if (options.workloadApiClient == null) {
|
||||
options.workloadApiClient = createClient(options);
|
||||
if (options.getWorkloadApiClient() == null) {
|
||||
options.setWorkloadApiClient(createClient(options));
|
||||
}
|
||||
|
||||
if (options.initTimeout == null) {
|
||||
options.initTimeout = DEFAULT_TIMEOUT;
|
||||
if (options.getInitTimeout() == null) {
|
||||
options.setInitTimeout(DEFAULT_TIMEOUT);
|
||||
}
|
||||
|
||||
DefaultJwtSource jwtSource = new DefaultJwtSource(options.workloadApiClient);
|
||||
DefaultJwtSource jwtSource = new DefaultJwtSource(options.getWorkloadApiClient());
|
||||
|
||||
try {
|
||||
jwtSource.init(options.initTimeout);
|
||||
jwtSource.init(options.getInitTimeout());
|
||||
} catch (Exception e) {
|
||||
jwtSource.close();
|
||||
throw new JwtSourceException("Error creating JWT source", e);
|
||||
|
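Both JWT sources read their default initialization timeout from the same system property, parsed as an ISO-8601 duration; a sketch (exception handling omitted):

// PT30S = 30 seconds; when the property is absent the default is PT0S,
// meaning block until the first JWT bundle update arrives
System.setProperty("spiffe.newJwtSource.timeout", "PT30S");
JwtSource source = CachedJwtSource.newSource();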
@ -130,6 +123,30 @@ public class DefaultJwtSource implements JwtSource {
|
|||
return workloadApiClient.fetchJwtSvid(subject, audience, extraAudiences);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(String audience, String... extraAudiences) throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
return workloadApiClient.fetchJwtSvids(audience, extraAudiences);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches all new JWT SVIDs from the Workload API for the given subject SPIFFE ID and audiences.
|
||||
*
|
||||
* @return all {@link JwtSvid}s
|
||||
* @throws IllegalStateException if the source is closed
|
||||
*/
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(final SpiffeId subject, final String audience, final String... extraAudiences)
|
||||
throws JwtSvidException {
|
||||
if (isClosed()) {
|
||||
throw new IllegalStateException("JWT SVID source is closed");
|
||||
}
|
||||
|
||||
return workloadApiClient.fetchJwtSvids(subject, audience, extraAudiences);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the JWT bundle for a given trust domain.
|
||||
*
|
||||
|
@ -216,42 +233,8 @@ public class DefaultJwtSource implements JwtSource {
|
|||
throws SocketEndpointAddressException {
|
||||
val clientOptions = DefaultWorkloadApiClient.ClientOptions
|
||||
.builder()
|
||||
.spiffeSocketPath(options.spiffeSocketPath)
|
||||
.spiffeSocketPath(options.getSpiffeSocketPath())
|
||||
.build();
|
||||
return DefaultWorkloadApiClient.newClient(clientOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Options to configure a {@link DefaultJwtSource}.
|
||||
* <p>
|
||||
* <code>spiffeSocketPath</code> Address to the Workload API, if it is not set, the default address will be used.
|
||||
* <p>
|
||||
* <code>initTimeout</code> Timeout for initializing the instance. If it is not defined, the timeout is read
|
||||
* from the System property `spiffe.newJwtSource.timeout'. If this is also not defined, no default timeout is applied.
|
||||
* <p>
|
||||
* <code>workloadApiClient</code> A custom instance of a {@link WorkloadApiClient}, if it is not set,
|
||||
* a new client will be created.
|
||||
*/
|
||||
@Data
|
||||
public static class JwtSourceOptions {
|
||||
|
||||
@Setter(AccessLevel.NONE)
|
||||
private String spiffeSocketPath;
|
||||
|
||||
@Setter(AccessLevel.NONE)
|
||||
private Duration initTimeout;
|
||||
|
||||
@Setter(AccessLevel.NONE)
|
||||
private WorkloadApiClient workloadApiClient;
|
||||
|
||||
@Builder
|
||||
public JwtSourceOptions(
|
||||
final String spiffeSocketPath,
|
||||
final WorkloadApiClient workloadApiClient,
|
||||
final Duration initTimeout) {
|
||||
this.spiffeSocketPath = spiffeSocketPath;
|
||||
this.workloadApiClient = workloadApiClient;
|
||||
this.initTimeout = initTimeout;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,6 +41,7 @@ import java.util.logging.Level;
|
|||
import static io.spiffe.workloadapi.StreamObservers.getJwtBundleStreamObserver;
|
||||
import static io.spiffe.workloadapi.StreamObservers.getX509BundlesStreamObserver;
|
||||
import static io.spiffe.workloadapi.StreamObservers.getX509ContextStreamObserver;
|
||||
import static org.apache.commons.lang3.StringUtils.EMPTY;
|
||||
|
||||
/**
|
||||
* Represents a client to interact with the Workload API.
|
||||
|
@ -241,6 +242,40 @@ public final class DefaultWorkloadApiClient implements WorkloadApiClient {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException {
|
||||
final Set<String> audParam = createAudienceSet(audience, extraAudience);
|
||||
try (val cancellableContext = Context.current().withCancellation()) {
|
||||
return cancellableContext.call(() -> callFetchJwtSvids(audParam));
|
||||
} catch (Exception e) {
|
||||
throw new JwtSvidException("Error fetching JWT SVID", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
     *
     * @return a list of {@link JwtSvid}s fetched for the given subject and audiences
|
||||
*/
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(@NonNull final SpiffeId subject,
|
||||
@NonNull final String audience,
|
||||
final String... extraAudience)
|
||||
throws JwtSvidException {
|
||||
|
||||
final Set<String> audParam = createAudienceSet(audience, extraAudience);
|
||||
|
||||
try (val cancellableContext = Context.current().withCancellation()) {
|
||||
return cancellableContext.call(() -> callFetchJwtSvids(subject, audParam));
|
||||
} catch (Exception e) {
|
||||
throw new JwtSvidException("Error fetching JWT SVID", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
|
@ -272,7 +307,7 @@ public final class DefaultWorkloadApiClient implements WorkloadApiClient {
|
|||
if (response == null || StringUtils.isBlank(response.getSpiffeId())) {
|
||||
throw new JwtSvidException("Error validating JWT SVID. Empty response from Workload API");
|
||||
}
|
||||
return JwtSvid.parseInsecure(token, Collections.singleton(audience));
|
||||
return JwtSvid.parseInsecure(token, Collections.singleton(audience), EMPTY);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -305,6 +340,7 @@ public final class DefaultWorkloadApiClient implements WorkloadApiClient {
|
|||
if (managedChannel != null) {
|
||||
managedChannel.close();
|
||||
}
|
||||
|
||||
retryExecutor.shutdown();
|
||||
executorService.shutdown();
|
||||
closed = true;
|
||||
|
@ -330,7 +366,7 @@ public final class DefaultWorkloadApiClient implements WorkloadApiClient {
|
|||
.addAllAudience(audience)
|
||||
.build();
|
||||
val response = workloadApiBlockingStub.fetchJWTSVID(jwtSvidRequest);
|
||||
return processJwtSvidResponse(response, audience);
|
||||
return processJwtSvidResponse(response, audience, true).get(0);
|
||||
}
|
||||
|
||||
private JwtSvid callFetchJwtSvid(final Set<String> audience) throws JwtSvidException {
|
||||
|
@ -338,14 +374,47 @@ public final class DefaultWorkloadApiClient implements WorkloadApiClient {
|
|||
.addAllAudience(audience)
|
||||
.build();
|
||||
val response = workloadApiBlockingStub.fetchJWTSVID(jwtSvidRequest);
|
||||
return processJwtSvidResponse(response, audience);
|
||||
return processJwtSvidResponse(response, audience, true).get(0);
|
||||
}
|
||||
|
||||
private JwtSvid processJwtSvidResponse(Workload.JWTSVIDResponse response, Set<String> audience) throws JwtSvidException {
|
||||
private List<JwtSvid> callFetchJwtSvids(final SpiffeId subject, final Set<String> audience) throws JwtSvidException {
|
||||
val jwtSvidRequest = Workload.JWTSVIDRequest.newBuilder()
|
||||
.setSpiffeId(subject.toString())
|
||||
.addAllAudience(audience)
|
||||
.build();
|
||||
val response = workloadApiBlockingStub.fetchJWTSVID(jwtSvidRequest);
|
||||
return processJwtSvidResponse(response, audience, false);
|
||||
}
|
||||
|
||||
private List<JwtSvid> callFetchJwtSvids(final Set<String> audience) throws JwtSvidException {
|
||||
val jwtSvidRequest = Workload.JWTSVIDRequest.newBuilder()
|
||||
.addAllAudience(audience)
|
||||
.build();
|
||||
val response = workloadApiBlockingStub.fetchJWTSVID(jwtSvidRequest);
|
||||
return processJwtSvidResponse(response, audience, false);
|
||||
}
|
||||
|
||||
private List<JwtSvid> processJwtSvidResponse(Workload.JWTSVIDResponse response, Set<String> audience, boolean firstOnly) throws JwtSvidException {
|
||||
if (response.getSvidsList() == null || response.getSvidsList().isEmpty()) {
|
||||
throw new JwtSvidException("JWT SVID response from the Workload API is empty");
|
||||
}
|
||||
return JwtSvid.parseInsecure(response.getSvids(0).getSvid(), audience);
|
||||
int n = response.getSvidsCount();
|
||||
if (firstOnly) {
|
||||
n = 1;
|
||||
}
|
||||
ArrayList<JwtSvid> svids = new ArrayList<>(n);
|
||||
HashSet<String> hints = new HashSet<>();
|
||||
for (int i = 0; i < n; i++) {
|
||||
// In the event of more than one JWTSVID message with the same hint value set, then the first message in the
|
||||
// list SHOULD be selected.
|
||||
if (hints.contains(response.getSvids(i).getHint())) {
|
||||
continue;
|
||||
}
|
||||
val svid = JwtSvid.parseInsecure(response.getSvids(i).getSvid(), audience, response.getSvids(i).getHint());
|
||||
hints.add(svid.getHint());
|
||||
svids.add(svid);
|
||||
}
|
||||
return svids;
|
||||
}
|
||||
|
||||
private JwtBundleSet callFetchBundles() throws JwtBundleException {
|
||||
|
|
|
@ -20,6 +20,7 @@ import java.util.Iterator;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.HashSet;
|
||||
|
||||
/**
|
||||
* Utility methods for converting GRPC objects to JAVA-SPIFFE domain objects.
|
||||
|
@ -132,9 +133,16 @@ final class GrpcConversionUtils {
|
|||
private static List<X509Svid> getListOfX509Svid(final Workload.X509SVIDResponse x509SvidResponse) throws X509ContextException{
|
||||
|
||||
final List<X509Svid> result = new ArrayList<>();
|
||||
HashSet<String> hints = new HashSet<>();
|
||||
|
||||
for (Workload.X509SVID x509SVID : x509SvidResponse.getSvidsList()) {
|
||||
// In the event of more than one X509SVID message with the same hint value set, then the first message in the
|
||||
// list SHOULD be selected.
|
||||
if (hints.contains(x509SVID.getHint())) {
|
||||
continue;
|
||||
}
|
||||
val svid = createAndValidateX509Svid(x509SVID);
|
||||
hints.add(svid.getHint());
|
||||
result.add(svid);
|
||||
}
|
||||
return result;
|
||||
|
@ -144,9 +152,9 @@ final class GrpcConversionUtils {
|
|||
byte[] certsBytes = x509SVID.getX509Svid().toByteArray();
|
||||
byte[] privateKeyBytes = x509SVID.getX509SvidKey().toByteArray();
|
||||
|
||||
X509Svid svid = null;
|
||||
X509Svid svid;
|
||||
try {
|
||||
svid = X509Svid.parseRaw(certsBytes, privateKeyBytes);
|
||||
svid = X509Svid.parseRaw(certsBytes, privateKeyBytes, x509SVID.getHint());
|
||||
} catch (X509SvidException e) {
|
||||
throw new X509ContextException("X.509 SVID response could not be processed", e);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,43 @@
package io.spiffe.workloadapi;

import lombok.AccessLevel;
import lombok.Builder;
import lombok.Data;
import lombok.Setter;

import java.time.Duration;

/**
 * Options to configure a {@link JwtSource}.
 * <p>
 * <code>spiffeSocketPath</code> Address to the Workload API; if it is not set, the default address will be used.
 * <p>
 * <code>initTimeout</code> Timeout for initializing the instance. If it is not defined, the timeout is read
 * from the System property `spiffe.newJwtSource.timeout`. If this is also not defined, no default timeout is applied.
 * <p>
 * <code>workloadApiClient</code> A custom instance of a {@link WorkloadApiClient}; if it is not set,
 * a new client will be created.
 */
@Data
public class JwtSourceOptions {

    @Setter(AccessLevel.PUBLIC)
    private String spiffeSocketPath;

    @Setter(AccessLevel.PUBLIC)
    private Duration initTimeout;

    @Setter(AccessLevel.PUBLIC)
    private WorkloadApiClient workloadApiClient;

    @Builder
    public JwtSourceOptions(
            final String spiffeSocketPath,
            final WorkloadApiClient workloadApiClient,
            final Duration initTimeout) {
        this.spiffeSocketPath = spiffeSocketPath;
        this.workloadApiClient = workloadApiClient;
        this.initTimeout = initTimeout;
    }
}
@ -45,7 +45,9 @@ final class StreamObservers {
|
|||
|
||||
@Override
|
||||
public void onError(final Throwable t) {
|
||||
log.log(Level.SEVERE, "X.509 context observer error", t);
|
||||
if (Status.fromThrowable(t).getCode() != Status.Code.CANCELLED) {
|
||||
log.log(Level.SEVERE, "X.509 context observer error", t);
|
||||
}
|
||||
handleWatchX509ContextError(t);
|
||||
}
|
||||
|
||||
|
@ -59,7 +61,7 @@ final class StreamObservers {
|
|||
|
||||
private void handleX509ContextRetry(Throwable t) {
|
||||
if (retryHandler.shouldRetry()) {
|
||||
log.log(Level.INFO, "Retrying connecting to Workload API to register X.509 context watcher");
|
||||
log.log(Level.FINE, "Retrying connecting to Workload API to register X.509 context watcher");
|
||||
retryHandler.scheduleRetry(() ->
|
||||
cancellableContext.run(
|
||||
() -> workloadApiAsyncStub.fetchX509SVID(newX509SvidRequest(),
|
||||
|
@ -97,7 +99,9 @@ final class StreamObservers {
|
|||
|
||||
@Override
|
||||
public void onError(final Throwable t) {
|
||||
log.log(Level.SEVERE, "X.509 bundles observer error", t);
|
||||
if (Status.fromThrowable(t).getCode() != Status.Code.CANCELLED) {
|
||||
log.log(Level.SEVERE, "X.509 bundles observer error", t);
|
||||
}
|
||||
handleWatchX509BundlesError(t);
|
||||
}
|
||||
|
||||
|
@ -111,7 +115,7 @@ final class StreamObservers {
|
|||
|
||||
private void handleX509BundlesRetry(Throwable t) {
|
||||
if (retryHandler.shouldRetry()) {
|
||||
log.log(Level.INFO, "Retrying connecting to Workload API to register X.509 bundles watcher");
|
||||
log.log(Level.FINE, "Retrying connecting to Workload API to register X.509 bundles watcher");
|
||||
retryHandler.scheduleRetry(() ->
|
||||
cancellableContext.run(
|
||||
() -> workloadApiAsyncStub.fetchX509Bundles(newX509BundlesRequest(),
|
||||
|
@ -149,7 +153,9 @@ final class StreamObservers {
|
|||
|
||||
@Override
|
||||
public void onError(final Throwable t) {
|
||||
log.log(Level.SEVERE, "JWT observer error", t);
|
||||
if (Status.fromThrowable(t).getCode() != Status.Code.CANCELLED) {
|
||||
log.log(Level.SEVERE, "JWT observer error", t);
|
||||
}
|
||||
handleWatchJwtBundleError(t);
|
||||
}
|
||||
|
||||
|
@ -163,7 +169,7 @@ final class StreamObservers {
|
|||
|
||||
private void handleJwtBundleRetry(Throwable t) {
|
||||
if (retryHandler.shouldRetry()) {
|
||||
log.log(Level.INFO, "Retrying connecting to Workload API to register JWT Bundles watcher");
|
||||
log.log(Level.FINE, "Retrying connecting to Workload API to register JWT Bundles watcher");
|
||||
retryHandler.scheduleRetry(() ->
|
||||
cancellableContext.run(() -> workloadApiAsyncStub.fetchJWTBundles(newJwtBundlesRequest(),
|
||||
this)));
|
||||
|
|
|
@ -11,6 +11,7 @@ import io.spiffe.svid.jwtsvid.JwtSvid;
import lombok.NonNull;

import java.io.Closeable;
import java.util.List;

/**
 * Represents a client to interact with the Workload API.

@ -78,6 +79,27 @@ public interface WorkloadApiClient extends Closeable {
     */
    JwtSvid fetchJwtSvid(@NonNull SpiffeId subject, @NonNull String audience, String... extraAudience) throws JwtSvidException;

    /**
     * Fetches all SPIFFE JWT-SVIDs in a one-shot blocking call.
     *
     * @param audience      the audience of the JWT-SVID
     * @param extraAudience the extra audiences for the JWT-SVID
     * @return a list of all fetched {@link JwtSvid} objects
     * @throws JwtSvidException if there is an error fetching or processing the JWT from the Workload API
     */
    List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException;

    /**
     * Fetches all SPIFFE JWT-SVIDs for the given subject in a one-shot blocking call.
     *
     * @param subject       a SPIFFE ID
     * @param audience      the audience of the JWT-SVID
     * @param extraAudience the extra audiences for the JWT-SVID
     * @return a list of all fetched {@link JwtSvid} objects
     * @throws JwtSvidException if there is an error fetching or processing the JWT from the Workload API
     */
    List<JwtSvid> fetchJwtSvids(@NonNull SpiffeId subject, @NonNull String audience, String... extraAudience) throws JwtSvidException;

    /**
     * Fetches the JWT bundles for JWT-SVID validation, keyed by trust domain.
     *
@ -28,6 +28,10 @@ public class RetryHandler {
|
|||
* @param runnable the task to be scheduled for execution
|
||||
*/
|
||||
public void scheduleRetry(final Runnable runnable) {
|
||||
if (executor.isShutdown()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (exponentialBackoffPolicy.reachedMaxRetries(retryCount)) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -76,6 +76,12 @@ message X509SVID {

    // Required. ASN.1 DER encoded X.509 bundle for the trust domain.
    bytes bundle = 4;

    // Optional. An operator-specified string used to provide guidance on how this
    // identity should be used by a workload when more than one SVID is returned.
    // For example, `internal` and `external` to indicate an SVID for internal or
    // external use, respectively.
    string hint = 5;
}

// The X509BundlesRequest message conveys parameters for requesting X.509

@ -117,6 +123,12 @@ message JWTSVID {

    // Required. Encoded JWT using JWS Compact Serialization.
    string svid = 2;

    // Optional. An operator-specified string used to provide guidance on how this
    // identity should be used by a workload when more than one SVID is returned.
    // For example, `internal` and `external` to indicate an SVID for internal or
    // external use, respectively.
    string hint = 3;
}

// The JWTBundlesRequest message conveys parameters for requesting JWT bundles.
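Given the new hint field, a workload that receives several SVIDs can pick one by the operator-provided label; a hypothetical helper using the Java API added in this change:

// Sketch: prefer the SVID tagged for external use, fall back to the first one otherwise
static X509Svid selectForExternalUse(List<X509Svid> svids) {
    return svids.stream()
            .filter(svid -> "external".equals(svid.getHint()))
            .findFirst()
            .orElse(svids.get(0));
}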
@ -11,6 +11,7 @@ import org.junit.jupiter.api.Test;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
@ -219,6 +220,25 @@ class JwtBundleTest {
|
|||
assertNotNull(jwtBundle.getJwtAuthorities().get("C6vs25welZOx6WksNYfbMfiw9l96pMnD"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testParseJWKSWithEmptyKeysArray_Success() {
|
||||
TrustDomain trustDomain = TrustDomain.parse("example.org");
|
||||
String jwksEmptyKeysJson = "{\"keys\": []}";
|
||||
byte[] bundleBytes = jwksEmptyKeysJson.getBytes(StandardCharsets.UTF_8);
|
||||
|
||||
|
||||
JwtBundle jwtBundle = null;
|
||||
try {
|
||||
jwtBundle = JwtBundle.parse(trustDomain, bundleBytes);
|
||||
} catch (JwtBundleException e) {
|
||||
fail("Parsing failed with exception: " + e.getMessage());
|
||||
}
|
||||
|
||||
assertNotNull(jwtBundle, "JwtBundle should not be null");
|
||||
assertEquals(trustDomain, jwtBundle.getTrustDomain(), "Trust domain should match");
|
||||
assertTrue(jwtBundle.getJwtAuthorities().isEmpty(), "JwtAuthorities should be empty");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testParse_MissingKid_Fails() throws URISyntaxException, IOException {
|
||||
Path path = Paths.get(toUri("testdata/jwtbundle/jwks_missing_kid.json"));
|
||||
|
@ -269,7 +289,7 @@ class JwtBundleTest {
|
|||
JwtBundle bundle = jwtBundle.getBundleForTrustDomain(TrustDomain.parse("example.org"));
|
||||
assertEquals(jwtBundle, bundle);
|
||||
} catch (BundleNotFoundException e) {
|
||||
fail(e);
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -305,8 +325,8 @@ class JwtBundleTest {
|
|||
} catch (AuthorityNotFoundException e) {
|
||||
fail(e);
|
||||
}
|
||||
assertEquals(key1, jwtAuthority1 );
|
||||
assertEquals(key2, jwtAuthority2 );
|
||||
assertEquals(key1, jwtAuthority1);
|
||||
assertEquals(key2, jwtAuthority2);
|
||||
|
||||
// Test RemoveJwtAuthority
|
||||
jwtBundle.removeJwtAuthority("key1");
|
||||
|
|
|
@ -71,14 +71,14 @@ class SpiffeIdUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
void toSetOfSpiffeIdsInvalidSeparator() {
|
||||
val spiffeIdsAsString = "spiffe://example.org/workload1, spiffe://example.org/workload2";
|
||||
try {
|
||||
SpiffeIdUtils.toSetOfSpiffeIds(spiffeIdsAsString, ',');
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Separator character is not supported.", e.getMessage());
|
||||
}
|
||||
void toSetOfSpiffeIdsCommaSeparator() {
|
||||
val spiffeIdsAsString = "spiffe://example.org/workload1,spiffe://example.org/workload2";
|
||||
val spiffeIdSet = SpiffeIdUtils.toSetOfSpiffeIds(spiffeIdsAsString, ',');
|
||||
|
||||
assertNotNull(spiffeIdSet);
|
||||
assertEquals(2, spiffeIdSet.size());
|
||||
assertTrue(spiffeIdSet.contains(SpiffeId.parse("spiffe://example.org/workload1")));
|
||||
assertTrue(spiffeIdSet.contains(SpiffeId.parse("spiffe://example.org/workload2")));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -39,10 +39,11 @@ class JwtSvidParseAndValidateTest {
|
|||
void parseAndValidateValidJwt(TestCase testCase) {
|
||||
try {
|
||||
String token = testCase.generateToken.get();
|
||||
JwtSvid jwtSvid = JwtSvid.parseAndValidate(token, testCase.jwtBundle, testCase.audience);
|
||||
JwtSvid jwtSvid = JwtSvid.parseAndValidate(token, testCase.jwtBundle, testCase.audience, testCase.hint);
|
||||
|
||||
assertEquals(testCase.expectedJwtSvid.getSpiffeId(), jwtSvid.getSpiffeId());
|
||||
assertEquals(testCase.expectedJwtSvid.getAudience(), jwtSvid.getAudience());
|
||||
assertEquals(testCase.expectedJwtSvid.getHint(), jwtSvid.getHint());
|
||||
assertEquals(testCase.expectedJwtSvid.getExpiry().toInstant().getEpochSecond(), jwtSvid.getExpiry().toInstant().getEpochSecond());
|
||||
assertEquals(token, jwtSvid.getToken());
|
||||
assertEquals(token, jwtSvid.marshal());
|
||||
|
@ -124,6 +125,7 @@ class JwtSvidParseAndValidateTest {
|
|||
jwtBundle.putJwtAuthority("authority3", key3.getPublic());
|
||||
|
||||
SpiffeId spiffeId = trustDomain.newSpiffeId("host");
|
||||
Date issuedAt = new Date();
|
||||
Date expiration = new Date(System.currentTimeMillis() + (60 * 60 * 1000));
|
||||
Set<String> audience = new HashSet<String>() {{add("audience1"); add("audience2");}};
|
||||
|
||||
|
@ -136,11 +138,16 @@ class JwtSvidParseAndValidateTest {
|
|||
.expectedAudience(Collections.singleton("audience1"))
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JOSE))
|
||||
.expectedException(null)
|
||||
.hint("external")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JOSE) ))
|
||||
claims.getClaims(),
|
||||
TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JOSE),
|
||||
"external"
|
||||
))
|
||||
.build()),
|
||||
Arguments.of(TestCase.builder()
|
||||
.name("using RSA signature")
|
||||
|
@ -148,11 +155,15 @@ class JwtSvidParseAndValidateTest {
|
|||
.expectedAudience(audience)
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key3, "authority3", JwtSvid.HEADER_TYP_JWT))
|
||||
.expectedException(null)
|
||||
.hint("internal")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key3, "authority3", JwtSvid.HEADER_TYP_JWT)))
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key3, "authority3", JwtSvid.HEADER_TYP_JWT),
|
||||
"internal"
|
||||
))
|
||||
.build()),
|
||||
Arguments.of(TestCase.builder()
|
||||
.name("using empty typ")
|
||||
|
@ -160,11 +171,16 @@ class JwtSvidParseAndValidateTest {
|
|||
.expectedAudience(audience)
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key3, "authority3", ""))
|
||||
.expectedException(null)
|
||||
.hint("")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key3, "authority3")))
|
||||
claims.getClaims(),
|
||||
TestUtils.generateToken(claims, key3, "authority3"),
|
||||
""
|
||||
))
|
||||
.build())
|
||||
);
|
||||
}
|
||||
|
@ -293,19 +309,21 @@ class JwtSvidParseAndValidateTest {
|
|||
String name;
|
||||
JwtBundle jwtBundle;
|
||||
Set<String> audience;
|
||||
String hint;
|
||||
Supplier<String> generateToken;
|
||||
Exception expectedException;
|
||||
JwtSvid expectedJwtSvid;
|
||||
|
||||
@Builder
|
||||
public TestCase(String name, JwtBundle jwtBundle, Set<String> expectedAudience, Supplier<String> generateToken,
|
||||
Exception expectedException, JwtSvid expectedJwtSvid) {
|
||||
Exception expectedException, JwtSvid expectedJwtSvid, String hint) {
|
||||
this.name = name;
|
||||
this.jwtBundle = jwtBundle;
|
||||
this.audience = expectedAudience;
|
||||
this.generateToken = generateToken;
|
||||
this.expectedException = expectedException;
|
||||
this.expectedJwtSvid = expectedJwtSvid;
|
||||
this.hint = hint;
|
||||
}
|
||||
}
|
||||
}
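The hunks above thread a new hint argument through JwtSvid parsing. A minimal usage sketch of the four-argument overload exercised by these tests, assuming a token and a JwtBundle are already at hand (the audience value and class name here are illustrative, not from the diff):

import io.spiffe.bundle.jwtbundle.JwtBundle;
import io.spiffe.svid.jwtsvid.JwtSvid;
import java.util.Collections;
import java.util.Set;

class JwtSvidHintExample {
    // Sketch only: validates the token against the bundle and carries the operator-supplied hint.
    static JwtSvid parseWithHint(String token, JwtBundle bundle) throws Exception {
        Set<String> audience = Collections.singleton("audience1"); // assumed audience
        JwtSvid svid = JwtSvid.parseAndValidate(token, bundle, audience, "external");
        System.out.println(svid.getSpiffeId() + " hint=" + svid.getHint());
        return svid;
    }
}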
|
|
@ -38,10 +38,11 @@ class JwtSvidParseInsecureTest {
|
|||
void parseValidJwt(TestCase testCase) {
|
||||
try {
|
||||
String token = testCase.generateToken.get();
|
||||
JwtSvid jwtSvid = JwtSvid.parseInsecure(token, testCase.audience);
|
||||
JwtSvid jwtSvid = JwtSvid.parseInsecure(token, testCase.audience, testCase.hint);
|
||||
|
||||
assertEquals(testCase.expectedJwtSvid.getSpiffeId(), jwtSvid.getSpiffeId());
|
||||
assertEquals(testCase.expectedJwtSvid.getAudience(), jwtSvid.getAudience());
|
||||
assertEquals(testCase.expectedJwtSvid.getHint(), jwtSvid.getHint());
|
||||
assertEquals(testCase.expectedJwtSvid.getExpiry().toInstant().getEpochSecond(), jwtSvid.getExpiry().toInstant().getEpochSecond());
|
||||
assertEquals(token, jwtSvid.getToken());
|
||||
} catch (Exception e) {
|
||||
|
@ -112,6 +113,7 @@ class JwtSvidParseInsecureTest {
|
|||
|
||||
SpiffeId spiffeId = trustDomain.newSpiffeId("host");
|
||||
Date expiration = new Date(System.currentTimeMillis() + 3600000);
|
||||
Date issuedAt = new Date();
|
||||
Set<String> audience = Collections.singleton("audience");
|
||||
|
||||
JWTClaimsSet claims = TestUtils.buildJWTClaimSet(audience, spiffeId.toString(), expiration);
|
||||
|
@ -122,33 +124,48 @@ class JwtSvidParseInsecureTest {
|
|||
.expectedAudience(audience)
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JWT))
|
||||
.expectedException(null)
|
||||
.hint("internal")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JWT)))
|
||||
claims.getClaims(),
|
||||
TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JWT),
|
||||
"internal"
|
||||
))
|
||||
.build()),
|
||||
Arguments.of(TestCase.builder()
|
||||
.name("using typ as JOSE")
|
||||
.expectedAudience(audience)
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JOSE))
|
||||
.expectedException(null)
|
||||
.hint("external")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JWT)))
|
||||
claims.getClaims(),
|
||||
TestUtils.generateToken(claims, key1, "authority1", JwtSvid.HEADER_TYP_JWT),
|
||||
"external"
|
||||
))
|
||||
.build()),
|
||||
Arguments.of(TestCase.builder()
|
||||
.name("using empty typ")
|
||||
.expectedAudience(audience)
|
||||
.generateToken(() -> TestUtils.generateToken(claims, key1, "authority1", ""))
|
||||
.expectedException(null)
|
||||
.hint("")
|
||||
.expectedJwtSvid(newJwtSvidInstance(
|
||||
trustDomain.newSpiffeId("host"),
|
||||
audience,
|
||||
issuedAt,
|
||||
expiration,
|
||||
claims.getClaims(), TestUtils.generateToken(claims, key1, "authority1", "")))
|
||||
claims.getClaims(),
|
||||
TestUtils.generateToken(claims, key1, "authority1", ""),
|
||||
""
|
||||
))
|
||||
.build()));
|
||||
}
|
||||
|
||||
|
@ -217,30 +234,35 @@ class JwtSvidParseInsecureTest {
|
|||
static class TestCase {
|
||||
String name;
|
||||
Set<String> audience;
|
||||
String hint;
|
||||
Supplier<String> generateToken;
|
||||
Exception expectedException;
|
||||
JwtSvid expectedJwtSvid;
|
||||
|
||||
@Builder
|
||||
public TestCase(String name, Set<String> expectedAudience, Supplier<String> generateToken,
|
||||
Exception expectedException, JwtSvid expectedJwtSvid) {
|
||||
Exception expectedException, JwtSvid expectedJwtSvid, String hint) {
|
||||
this.name = name;
|
||||
this.audience = expectedAudience;
|
||||
this.generateToken = generateToken;
|
||||
this.expectedException = expectedException;
|
||||
this.expectedJwtSvid = expectedJwtSvid;
|
||||
this.hint = hint;
|
||||
}
|
||||
}
|
||||
|
||||
static JwtSvid newJwtSvidInstance(final SpiffeId spiffeId,
|
||||
final Set<String> audience,
|
||||
final Date issuedAt,
|
||||
final Date expiry,
|
||||
final Map<String, Object> claims,
|
||||
final String token) {
|
||||
final String token,
|
||||
final String hint
|
||||
) {
|
||||
val constructor = JwtSvid.class.getDeclaredConstructors()[0];
|
||||
constructor.setAccessible(true);
|
||||
try {
|
||||
return (JwtSvid) constructor.newInstance(spiffeId, audience, expiry, claims, token);
|
||||
return (JwtSvid) constructor.newInstance(spiffeId, audience, issuedAt, expiry, claims, token, hint);
|
||||
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
|
|
@ -48,9 +48,11 @@ class X509SvidTest {
|
|||
.name("1. Single certificate and key")
|
||||
.certsPath(certSingle)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedSpiffeId(SpiffeId.fromSegments(TrustDomain.parse("example.org"), "workload-1"))
|
||||
.expectedNumberOfCerts(1)
|
||||
.expectedPrivateKeyAlgorithm("RSA")
|
||||
.expectedHint("")
|
||||
.build()
|
||||
),
|
||||
Arguments.of(TestCase
|
||||
|
@ -58,9 +60,11 @@ class X509SvidTest {
|
|||
.name("2. Certificate with intermediate and key")
|
||||
.certsPath(certMultiple)
|
||||
.keyPath(keyECDSA)
|
||||
.hint("")
|
||||
.expectedSpiffeId(SpiffeId.fromSegments(TrustDomain.parse("example.org"), "workload-1"))
|
||||
.expectedNumberOfCerts(2)
|
||||
.expectedPrivateKeyAlgorithm("EC")
|
||||
.expectedHint("")
|
||||
.build()
|
||||
),
|
||||
Arguments.of(TestCase
|
||||
|
@ -68,7 +72,9 @@ class X509SvidTest {
|
|||
.name("3. Missing certificate")
|
||||
.certsPath(keyRSA)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Certificate could not be parsed from cert bytes")
|
||||
.expectedHint("")
|
||||
.build()
|
||||
),
|
||||
Arguments.of(TestCase
|
||||
|
@ -76,6 +82,7 @@ class X509SvidTest {
|
|||
.name("4. Missing key")
|
||||
.certsPath(certSingle)
|
||||
.keyPath(certSingle)
|
||||
.hint("")
|
||||
.expectedError("Private Key could not be parsed from key bytes")
|
||||
.build()
|
||||
),
|
||||
|
@ -84,6 +91,7 @@ class X509SvidTest {
|
|||
.name("5. Corrupted private key")
|
||||
.certsPath(certSingle)
|
||||
.keyPath(corrupted)
|
||||
.hint("")
|
||||
.expectedError("Private Key could not be parsed from key bytes")
|
||||
.build()
|
||||
),
|
||||
|
@ -92,6 +100,7 @@ class X509SvidTest {
|
|||
.name("6. Corrupted certificate")
|
||||
.certsPath(corrupted)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Certificate could not be parsed from cert bytes")
|
||||
.build()
|
||||
),
|
||||
|
@ -100,6 +109,7 @@ class X509SvidTest {
|
|||
.name("7. Certificate without SPIFFE ID")
|
||||
.certsPath(leafEmptyID)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Certificate does not contain SPIFFE ID in the URI SAN")
|
||||
.build()
|
||||
),
|
||||
|
@ -108,6 +118,7 @@ class X509SvidTest {
|
|||
.name("8. Leaf certificate with CA flag set to true")
|
||||
.certsPath(leafCAtrue)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Leaf certificate must not have CA flag set to true")
|
||||
.build()
|
||||
),
|
||||
|
@ -116,6 +127,7 @@ class X509SvidTest {
|
|||
.name("9. Leaf certificate without digitalSignature as key usage")
|
||||
.certsPath(leafNoDigitalSignature)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Leaf certificate must have 'digitalSignature' as key usage")
|
||||
.build()
|
||||
),
|
||||
|
@ -124,6 +136,7 @@ class X509SvidTest {
|
|||
.name("10. Leaf certificate with certSign as key usage")
|
||||
.certsPath(leafCertSign)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Leaf certificate must not have 'keyCertSign' as key usage")
|
||||
.build()
|
||||
),
|
||||
|
@ -132,6 +145,7 @@ class X509SvidTest {
|
|||
.name("11. Leaf certificate with cRLSign as key usage")
|
||||
.certsPath(leafCRLSign)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Leaf certificate must not have 'cRLSign' as key usage")
|
||||
.build()
|
||||
),
|
||||
|
@ -140,6 +154,7 @@ class X509SvidTest {
|
|||
.name("12. Signing certificate without CA flag")
|
||||
.certsPath(signNoCA)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Signing certificate must have CA flag set to true")
|
||||
.build()
|
||||
),
|
||||
|
@ -148,8 +163,21 @@ class X509SvidTest {
|
|||
.name("13. Signing certificate without CA flag")
|
||||
.certsPath(signNoKeyCertSign)
|
||||
.keyPath(keyRSA)
|
||||
.hint("")
|
||||
.expectedError("Signing certificate must have 'keyCertSign' as key usage")
|
||||
.build()
|
||||
),
|
||||
Arguments.of(TestCase
|
||||
.builder()
|
||||
.name("14. SVID with non-empty hint")
|
||||
.certsPath(certSingle)
|
||||
.keyPath(keyRSA)
|
||||
.hint("internal")
|
||||
.expectedSpiffeId(SpiffeId.fromSegments(TrustDomain.parse("example.org"), "workload-1"))
|
||||
.expectedNumberOfCerts(1)
|
||||
.expectedPrivateKeyAlgorithm("RSA")
|
||||
.expectedHint("internal")
|
||||
.build()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
@ -176,8 +204,10 @@ class X509SvidTest {
|
|||
byte[] keyBytes = Files.readAllBytes(keyPath);
|
||||
|
||||
try {
|
||||
X509Svid x509Svid = X509Svid.parseRaw(certBytes, keyBytes);
|
||||
X509Svid x509Svid = X509Svid.parseRaw(certBytes, keyBytes, "external");
|
||||
assertEquals("spiffe://example.org/workload-server", x509Svid.getSpiffeId().toString());
|
||||
assertEquals("external", x509Svid.getHint());
|
||||
|
||||
} catch (X509SvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
|
@ -296,7 +326,7 @@ class X509SvidTest {
|
|||
byte[] certBytes = Files.readAllBytes(certPath);
|
||||
byte[] keyBytes = Files.readAllBytes(keyPath);
|
||||
|
||||
X509Svid x509Svid = X509Svid.parse(certBytes, keyBytes);
|
||||
X509Svid x509Svid = X509Svid.parse(certBytes, keyBytes, testCase.getHint());
|
||||
|
||||
if (StringUtils.isNotBlank(testCase.expectedError)) {
|
||||
fail(String.format("Error was expected: %s", testCase.expectedError));
|
||||
|
@ -309,6 +339,7 @@ class X509SvidTest {
|
|||
assertEquals(testCase.expectedNumberOfCerts, x509Svid.getChain().size());
|
||||
assertEquals(testCase.expectedSpiffeId, x509Svid.getSpiffeId());
|
||||
assertEquals(testCase.expectedPrivateKeyAlgorithm, x509Svid.getPrivateKey().getAlgorithm());
|
||||
assertEquals(testCase.expectedHint, x509Svid.getHint());
|
||||
|
||||
} catch (Exception e) {
|
||||
if (StringUtils.isBlank(testCase.expectedError)) {
|
||||
|
@ -322,21 +353,25 @@ class X509SvidTest {
|
|||
static class TestCase {
|
||||
String name;
|
||||
String certsPath;
|
||||
String hint;
|
||||
String keyPath;
|
||||
SpiffeId expectedSpiffeId;
|
||||
int expectedNumberOfCerts;
|
||||
String expectedPrivateKeyAlgorithm;
|
||||
String expectedHint;
|
||||
String expectedError;
|
||||
|
||||
@Builder
|
||||
public TestCase(String name, String certsPath, String keyPath, SpiffeId expectedSpiffeId, int expectedNumberOfCerts, String expectedPrivateKeyAlgorithm, String expectedError) {
|
||||
public TestCase(String name, String certsPath, String keyPath, String hint, SpiffeId expectedSpiffeId, int expectedNumberOfCerts, String expectedPrivateKeyAlgorithm, String expectedHint, String expectedError) {
|
||||
this.name = name;
|
||||
this.certsPath = certsPath;
|
||||
this.keyPath = keyPath;
|
||||
this.hint = hint;
|
||||
this.expectedSpiffeId = expectedSpiffeId;
|
||||
this.expectedNumberOfCerts = expectedNumberOfCerts;
|
||||
this.expectedPrivateKeyAlgorithm = expectedPrivateKeyAlgorithm;
|
||||
this.expectedError = expectedError;
|
||||
this.expectedHint = expectedHint;
|
||||
}
|
||||
}
|
||||
}
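The X509SvidTest changes above add the same hint to X.509 parsing. A minimal sketch of the three-argument parse overload used by the test, assuming PEM files exist at the illustrative paths shown (paths and class name are placeholders, not from the PR):

import io.spiffe.svid.x509svid.X509Svid;
import java.nio.file.Files;
import java.nio.file.Paths;

class X509SvidHintExample {
    static X509Svid loadWithHint() throws Exception {
        // Illustrative paths; substitute real certificate and key files.
        byte[] certBytes = Files.readAllBytes(Paths.get("cert-leaf.pem"));
        byte[] keyBytes = Files.readAllBytes(Paths.get("key-pkcs8.pem"));
        // The third argument is the operator-provided hint carried by the parsed SVID.
        return X509Svid.parse(certBytes, keyBytes, "internal");
    }
}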
|
||||
|
|
|
@ -6,6 +6,7 @@ import org.junit.jupiter.api.Test;
|
|||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.stream.Stream;
|
||||
|
@ -74,19 +75,24 @@ public class AddressTest {
|
|||
|
||||
@Test
|
||||
void getDefaultAddress() throws Exception {
|
||||
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test" );
|
||||
String defaultAddress = Address.getDefaultAddress();
|
||||
assertEquals("unix:/tmp/test", defaultAddress);
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test").execute(() -> {
|
||||
String defaultAddress = Address.getDefaultAddress();
|
||||
assertEquals("unix:/tmp/test", defaultAddress);
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void getDefaultAddress_isBlankThrowsException() throws Exception {
|
||||
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "");
|
||||
try {
|
||||
Address.getDefaultAddress();
|
||||
fail();
|
||||
} catch (Exception e) {
|
||||
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
|
||||
}
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
|
||||
try {
|
||||
Address.getDefaultAddress();
|
||||
fail();
|
||||
} catch (Exception e) {
|
||||
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
|
||||
});
|
||||
}
|
||||
}
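AddressTest above replaces TestUtils.setEnvironmentVariable with system-stubs. A minimal sketch of that pattern, assuming system-stubs-core is on the test classpath (the printed value is only for illustration):

import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;

class EnvVarStubExample {
    static void run() throws Exception {
        // The variable is set only for the duration of the lambda and restored afterwards.
        new EnvironmentVariables("SPIFFE_ENDPOINT_SOCKET", "unix:/tmp/test").execute(() -> {
            System.out.println(System.getenv("SPIFFE_ENDPOINT_SOCKET")); // prints unix:/tmp/test
        });
    }
}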
|
|
@ -0,0 +1,523 @@
|
|||
package io.spiffe.workloadapi;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import io.spiffe.bundle.jwtbundle.JwtBundle;
|
||||
import io.spiffe.exception.BundleNotFoundException;
|
||||
import io.spiffe.exception.JwtSourceException;
|
||||
import io.spiffe.exception.JwtSvidException;
|
||||
import io.spiffe.exception.SocketEndpointAddressException;
|
||||
import io.spiffe.spiffeid.SpiffeId;
|
||||
import io.spiffe.spiffeid.TrustDomain;
|
||||
import io.spiffe.svid.jwtsvid.JwtSvid;
|
||||
import lombok.val;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
import java.time.Duration;
|
||||
import java.time.Instant;
|
||||
import java.time.ZoneId;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
|
||||
import static io.spiffe.workloadapi.WorkloadApiClientStub.JWT_TTL;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
class CachedJwtSourceTest {
|
||||
private CachedJwtSource jwtSource;
|
||||
private WorkloadApiClientStub workloadApiClient;
|
||||
private WorkloadApiClientErrorStub workloadApiClientErrorStub;
|
||||
private Clock clock;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() throws JwtSourceException, SocketEndpointAddressException {
|
||||
workloadApiClient = new WorkloadApiClientStub();
|
||||
JwtSourceOptions options = JwtSourceOptions.builder().workloadApiClient(workloadApiClient).build();
|
||||
System.setProperty(CachedJwtSource.TIMEOUT_SYSTEM_PROPERTY, "PT1S");
|
||||
jwtSource = (CachedJwtSource) CachedJwtSource.newSource(options);
|
||||
workloadApiClientErrorStub = new WorkloadApiClientErrorStub();
|
||||
|
||||
clock = Clock.fixed(Instant.now(), ZoneId.systemDefault());
|
||||
workloadApiClient.setClock(clock);
|
||||
jwtSource.setClock(clock);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void tearDown() throws IOException {
|
||||
jwtSource.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetBundleForTrustDomain() {
|
||||
try {
|
||||
JwtBundle bundle = jwtSource.getBundleForTrustDomain(TrustDomain.parse("example.org"));
|
||||
assertNotNull(bundle);
|
||||
assertEquals(TrustDomain.parse("example.org"), bundle.getTrustDomain());
|
||||
} catch (BundleNotFoundException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetBundleForTrustDomain_nullParam() {
|
||||
try {
|
||||
jwtSource.getBundleForTrustDomain(null);
|
||||
fail();
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("trustDomain is marked non-null but is null", e.getMessage());
|
||||
} catch (BundleNotFoundException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetBundleForTrustDomain_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.getBundleForTrustDomain(TrustDomain.parse("example.org"));
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT bundle source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (BundleNotFoundException e) {
|
||||
fail("not expected exception", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithSubject() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithSubject_ReturnFromCache() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud3", "aud2", "aud1");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again to get from the cache, changing the order of the audiences
|
||||
svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again using different subject
|
||||
svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/extra-workload-server"), "aud2", "aud3", "aud1");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again using the same audiences
|
||||
svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/extra-workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
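This test expects a cache hit even when the audiences are passed in a different order, so the lookup key must treat the audience list as a set. A minimal sketch of such a key, purely illustrative and not taken from the PR's CachedJwtSource:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

final class JwtSvidCacheKey {
    private final String subject;        // SPIFFE ID string, may be null when no subject is given
    private final Set<String> audiences; // set semantics make ("aud1","aud2") equal to ("aud2","aud1")

    JwtSvidCacheKey(String subject, String... audiences) {
        this.subject = subject;
        this.audiences = new HashSet<>(Arrays.asList(audiences));
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof JwtSvidCacheKey)) return false;
        JwtSvidCacheKey other = (JwtSvidCacheKey) o;
        return Objects.equals(subject, other.subject) && audiences.equals(other.audiences);
    }

    @Override
    public int hashCode() {
        return Objects.hash(subject, audiences);
    }
}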
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithSubject_JwtSvidExpiredInCache() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// set clock forwards but not enough to expire the JWT SVID in the cache
|
||||
jwtSource.setClock(clock.offset(clock, JWT_TTL.dividedBy(2).minus(Duration.ofSeconds(1))));
|
||||
|
||||
// call again to get from cache, fetchJwtSvid call count should not change
|
||||
svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// set clock to expire the JWT SVID in the cache
|
||||
jwtSource.setClock(clock.offset(clock, JWT_TTL.dividedBy(2).plus(Duration.ofSeconds(1))));
|
||||
|
||||
// call again, fetchJwtSvid call count should increase
|
||||
svid = jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
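The expiry assertions above advance a fixed Clock rather than sleeping; the offsets of TTL/2 plus or minus one second suggest the cache refreshes once half the stubbed lifetime has elapsed. A minimal sketch of the idiom, with an assumed TTL standing in for JWT_TTL:

import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;

class ClockOffsetExample {
    static void demo() {
        Duration jwtTtl = Duration.ofMinutes(10); // assumed stub TTL, stands in for JWT_TTL
        Clock base = Clock.fixed(Instant.now(), ZoneId.systemDefault());

        // Not yet expired: just under half the TTL has passed.
        Clock fresh = Clock.offset(base, jwtTtl.dividedBy(2).minus(Duration.ofSeconds(1)));

        // Expired: just past the half-TTL refresh point, so the cache must fetch again.
        Clock stale = Clock.offset(base, jwtTtl.dividedBy(2).plus(Duration.ofSeconds(1)));

        System.out.println(fresh.instant() + " / " + stale.instant());
    }
}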
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithSubject_JwtSvidExpiredInCache_MultipleThreads() {
|
||||
// test fetchJwtSvid with several threads trying to read and write the cache
|
||||
// at the same time; the cache should be updated only once
|
||||
try {
|
||||
|
||||
jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// set clock to expire the JWT SVID in the cache
|
||||
Clock offset = Clock.offset(clock, JWT_TTL.dividedBy(2).plus(Duration.ofSeconds(1)));
|
||||
jwtSource.setClock(offset);
|
||||
workloadApiClient.setClock(offset);
|
||||
|
||||
// create a thread pool with 10 threads
|
||||
ExecutorService executorService = Executors.newFixedThreadPool(10);
|
||||
|
||||
List<Future<JwtSvid>> futures = new ArrayList<>();
|
||||
|
||||
// create 10 tasks to fetch a JWT SVID
|
||||
for (int i = 0; i < 10; i++) {
|
||||
futures.add(executorService.submit(() -> jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3")));
|
||||
}
|
||||
|
||||
// wait for all tasks to finish
|
||||
for (Future<JwtSvid> future : futures) {
|
||||
future.get();
|
||||
}
|
||||
|
||||
// verify that the cache was updated only once after the JWT SVID expired
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
} catch (InterruptedException | ExecutionException | JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
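The multi-threaded test expects exactly one extra Workload API call no matter how many threads see the expired entry. A minimal sketch of one way to obtain that guarantee, a synchronized double-check per lookup; this is illustrative and not the PR's implementation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
import java.util.function.Supplier;

class SingleRefreshCache<K, V> {
    private final Map<K, V> entries = new ConcurrentHashMap<>();

    // Returns the cached value, refreshing at most once even under concurrent callers.
    V getOrRefresh(K key, Predicate<V> isExpired, Supplier<V> fetch) {
        V current = entries.get(key);
        if (current != null && !isExpired.test(current)) {
            return current;
        }
        synchronized (this) {
            current = entries.get(key);               // re-check under the lock
            if (current == null || isExpired.test(current)) {
                current = fetch.get();                // only the first waiter hits the Workload API
                entries.put(key, current);
            }
            return current;
        }
    }
}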
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithoutSubject() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid("aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithoutSubject_ReturnFromCache() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid("aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again to get from the cache, changing the order of the audiences; the call count should not change
|
||||
svid = jwtSource.fetchJwtSvid("aud3", "aud2", "aud1");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again using different audience, the call count should increase
|
||||
svid = jwtSource.fetchJwtSvid("other-audience");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("other-audience"), svid.getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithoutSubject_JwtSvidExpiredInCache() {
|
||||
try {
|
||||
JwtSvid svid = jwtSource.fetchJwtSvid("aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// set clock forwards but not enough to expire the JWT SVID in the cache
|
||||
jwtSource.setClock(clock.offset(clock, JWT_TTL.dividedBy(2).minus(Duration.ofSeconds(1))));
|
||||
|
||||
// call again to get from cache, fetchJwtSvid call count should not change
|
||||
svid = jwtSource.fetchJwtSvid("aud3", "aud2", "aud1");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// set clock forwards to expire the JWT SVID in the cache
|
||||
jwtSource.setClock(clock.offset(clock, JWT_TTL.dividedBy(2).plus(Duration.ofSeconds(1))));
|
||||
|
||||
// call again, fetchJwtSvid call count should increase
|
||||
svid = jwtSource.fetchJwtSvid("aud1", "aud2", "aud3");
|
||||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvid_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvid("aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidWithSubject_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithSubject() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(1, svids.size());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithSubject_ReturnFromCache() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(1, svids.size());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again with the same audiences; the result should come from the cache
|
||||
svids = jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(1, svids.size());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud3", "aud2", "aud1"), svids.get(0).getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again using different audience
|
||||
svids = jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "other-audience");
|
||||
assertNotNull(svids);
|
||||
assertEquals(1, svids.size());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("other-audience"), svids.get(0).getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithoutSubject() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(svids.size(), 2);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svids.get(1).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(1).getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithoutSubject_ReturnFromCache() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(svids.size(), 2);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svids.get(1).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(1).getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again to get from the cache, changing the order of the audiences
|
||||
svids = jwtSource.fetchJwtSvids("aud2", "aud3", "aud1");
|
||||
assertNotNull(svids);
|
||||
assertEquals(svids.size(), 2);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svids.get(1).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(1).getAudience());
|
||||
assertEquals(1, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
|
||||
// call again using different audience
|
||||
svids = jwtSource.fetchJwtSvids("other-audience");
|
||||
assertNotNull(svids);
|
||||
assertEquals(svids.size(), 2);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("other-audience"), svids.get(0).getAudience());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svids.get(1).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("other-audience"), svids.get(1).getAudience());
|
||||
assertEquals(2, workloadApiClient.getFetchJwtSvidCallCount());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithSubject_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_success() {
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.workloadApiClient(workloadApiClient)
|
||||
.initTimeout(Duration.ofSeconds(0))
|
||||
.build();
|
||||
try {
|
||||
JwtSource jwtSource = CachedJwtSource.newSource(options);
|
||||
assertNotNull(jwtSource);
|
||||
} catch (SocketEndpointAddressException | JwtSourceException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_nullParam() {
|
||||
try {
|
||||
CachedJwtSource.newSource(null);
|
||||
fail();
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("options is marked non-null but is null", e.getMessage());
|
||||
} catch (SocketEndpointAddressException | JwtSourceException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_errorFetchingJwtBundles() {
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.workloadApiClient(workloadApiClientErrorStub)
|
||||
.spiffeSocketPath("unix:/tmp/test")
|
||||
.build();
|
||||
try {
|
||||
CachedJwtSource.newSource(options);
|
||||
fail();
|
||||
} catch (JwtSourceException e) {
|
||||
assertEquals("Error creating JWT source", e.getMessage());
|
||||
assertEquals("Error fetching JwtBundleSet", e.getCause().getMessage());
|
||||
} catch (Exception e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_FailsBecauseOfTimeOut() throws Exception {
|
||||
try {
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.spiffeSocketPath("unix:/tmp/test")
|
||||
.build();
|
||||
CachedJwtSource.newSource(options);
|
||||
fail();
|
||||
} catch (JwtSourceException e) {
|
||||
assertEquals("Error creating JWT source", e.getMessage());
|
||||
assertEquals("Timeout waiting for JWT bundles update", e.getCause().getMessage());
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_DefaultSocketAddress() throws Exception {
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test").execute(() -> {
|
||||
try {
|
||||
CachedJwtSource.newSource();
|
||||
fail();
|
||||
} catch (JwtSourceException e) {
|
||||
assertEquals("Error creating JWT source", e.getMessage());
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_noSocketAddress() throws Exception {
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
|
||||
try {
|
||||
CachedJwtSource.newSource();
|
||||
fail();
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
|
@ -14,16 +14,18 @@ import lombok.val;
|
|||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
class JwtSourceTest {
|
||||
class DefaultJwtSourceTest {
|
||||
|
||||
private JwtSource jwtSource;
|
||||
private WorkloadApiClientStub workloadApiClient;
|
||||
|
@ -32,7 +34,7 @@ class JwtSourceTest {
|
|||
@BeforeEach
|
||||
void setUp() throws JwtSourceException, SocketEndpointAddressException {
|
||||
workloadApiClient = new WorkloadApiClientStub();
|
||||
DefaultJwtSource.JwtSourceOptions options = DefaultJwtSource.JwtSourceOptions.builder().workloadApiClient(workloadApiClient).build();
|
||||
JwtSourceOptions options = JwtSourceOptions.builder().workloadApiClient(workloadApiClient).build();
|
||||
System.setProperty(DefaultJwtSource.TIMEOUT_SYSTEM_PROPERTY, "PT1S");
|
||||
jwtSource = DefaultJwtSource.newSource(options);
|
||||
workloadApiClientErrorStub = new WorkloadApiClientErrorStub();
|
||||
|
@ -87,6 +89,7 @@ class JwtSourceTest {
|
|||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals("external", svid.getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
|
@ -99,6 +102,7 @@ class JwtSourceTest {
|
|||
assertNotNull(svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svid.getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svid.getAudience());
|
||||
assertEquals("external", svid.getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
|
@ -132,9 +136,67 @@ class JwtSourceTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithSubject() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(svids.size(), 1);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithoutSubject() {
|
||||
try {
|
||||
List<JwtSvid> svids = jwtSource.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
assertNotNull(svids);
|
||||
assertEquals(2, svids.size());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), svids.get(0).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(0).getAudience());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), svids.get(1).getSpiffeId());
|
||||
assertEquals(Sets.newHashSet("aud1", "aud2", "aud3"), svids.get(1).getAudience());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsWithSubject_SourceIsClosed_ThrowsIllegalStateException() throws IOException {
|
||||
jwtSource.close();
|
||||
try {
|
||||
jwtSource.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/workload-server"), "aud1", "aud2", "aud3");
|
||||
fail("expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("JWT SVID source is closed", e.getMessage());
|
||||
assertTrue(workloadApiClient.closed);
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_success() {
|
||||
val options = DefaultJwtSource.JwtSourceOptions
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.workloadApiClient(workloadApiClient)
|
||||
.initTimeout(Duration.ofSeconds(0))
|
||||
|
@ -161,7 +223,7 @@ class JwtSourceTest {
|
|||
|
||||
@Test
|
||||
void newSource_errorFetchingJwtBundles() {
|
||||
val options = DefaultJwtSource.JwtSourceOptions
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.workloadApiClient(workloadApiClientErrorStub)
|
||||
.spiffeSocketPath("unix:/tmp/test")
|
||||
|
@ -180,7 +242,7 @@ class JwtSourceTest {
|
|||
@Test
|
||||
void newSource_FailsBecauseOfTimeOut() throws Exception {
|
||||
try {
|
||||
val options = DefaultJwtSource.JwtSourceOptions
|
||||
val options = JwtSourceOptions
|
||||
.builder()
|
||||
.spiffeSocketPath("unix:/tmp/test")
|
||||
.build();
|
||||
|
@ -196,28 +258,29 @@ class JwtSourceTest {
|
|||
|
||||
@Test
|
||||
void newSource_DefaultSocketAddress() throws Exception {
|
||||
try {
|
||||
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test");
|
||||
DefaultJwtSource.newSource();
|
||||
fail();
|
||||
} catch (JwtSourceException e) {
|
||||
assertEquals("Error creating JWT source", e.getMessage());
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
}
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test").execute(() -> {
|
||||
try {
|
||||
DefaultJwtSource.newSource();
|
||||
fail();
|
||||
} catch (JwtSourceException e) {
|
||||
assertEquals("Error creating JWT source", e.getMessage());
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_noSocketAddress() throws Exception {
|
||||
try {
|
||||
// just in case it's defined in the environment
|
||||
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "");
|
||||
DefaultJwtSource.newSource();
|
||||
fail();
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
|
||||
}
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
|
||||
try {
|
||||
DefaultJwtSource.newSource();
|
||||
fail();
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail();
|
||||
} catch (IllegalStateException e) {
|
||||
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
|
@ -116,6 +116,28 @@ class DefaultWorkloadApiClientEmptyResponseTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_throwsJwtSvidException() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvids("aud1", "aud2");
|
||||
fail();
|
||||
} catch (JwtSvidException e) {
|
||||
assertEquals("Error fetching JWT SVID", e.getMessage());
|
||||
assertEquals("JWT SVID response from the Workload API is empty", e.getCause().getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsPassingSpiffeId_throwsJwtSvidException() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/test"), "aud1", "aud2");
|
||||
fail();
|
||||
} catch (JwtSvidException e) {
|
||||
assertEquals("Error fetching JWT SVID", e.getMessage());
|
||||
assertEquals("JWT SVID response from the Workload API is empty", e.getCause().getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testValidateJwtSvid_throwsJwtSvidException() {
|
||||
try {
|
||||
|
|
|
@ -115,6 +115,26 @@ class DefaultWorkloadApiClientInvalidArgumentTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_throwsJwtSvidException() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvids("aud1", "aud2");
|
||||
fail();
|
||||
} catch (JwtSvidException e) {
|
||||
assertEquals("Error fetching JWT SVID", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsPassingSpiffeId_throwsJwtSvidException() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/test"), "aud1", "aud2");
|
||||
fail();
|
||||
} catch (JwtSvidException e) {
|
||||
assertEquals("Error fetching JWT SVID", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testValidateJwtSvid_throwsJwtSvidException() {
|
||||
try {
|
||||
|
|
|
@ -20,6 +20,7 @@ import org.junit.Rule;
|
|||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.security.KeyPair;
|
||||
|
@ -54,13 +55,14 @@ class DefaultWorkloadApiClientTest {
|
|||
|
||||
@Test
|
||||
void testNewClient_defaultOptions() throws Exception {
|
||||
try {
|
||||
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/agent.sock" );
|
||||
WorkloadApiClient client = DefaultWorkloadApiClient.newClient();
|
||||
assertNotNull(client);
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail(e);
|
||||
}
|
||||
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/agent.sock").execute(() -> {
|
||||
try {
|
||||
WorkloadApiClient client = DefaultWorkloadApiClient.newClient();
|
||||
assertNotNull(client);
|
||||
} catch (SocketEndpointAddressException e) {
|
||||
fail(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -100,6 +102,7 @@ class DefaultWorkloadApiClientTest {
|
|||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), x509Context.getDefaultSvid().getSpiffeId());
|
||||
assertNotNull(x509Context.getDefaultSvid().getChain());
|
||||
assertNotNull(x509Context.getDefaultSvid().getPrivateKey());
|
||||
assertEquals("external", x509Context.getDefaultSvid().getHint());
|
||||
assertNotNull(x509Context.getX509BundleSet());
|
||||
try {
|
||||
X509Bundle bundle = x509Context.getX509BundleSet().getBundleForTrustDomain(TrustDomain.parse("example.org"));
|
||||
|
@ -134,6 +137,7 @@ class DefaultWorkloadApiClientTest {
|
|||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), update.getDefaultSvid().getSpiffeId());
|
||||
assertNotNull(update.getDefaultSvid().getChain());
|
||||
assertNotNull(update.getDefaultSvid().getPrivateKey());
|
||||
assertEquals("external", update.getDefaultSvid().getHint());
|
||||
assertNotNull(update.getX509BundleSet());
|
||||
try {
|
||||
X509Bundle bundle = update.getX509BundleSet().getBundleForTrustDomain(TrustDomain.parse("example.org"));
|
||||
|
@ -225,6 +229,7 @@ class DefaultWorkloadApiClientTest {
|
|||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), jwtSvid.getSpiffeId());
|
||||
assertTrue(jwtSvid.getAudience().contains("aud1"));
|
||||
assertEquals(3, jwtSvid.getAudience().size());
|
||||
assertEquals("external", jwtSvid.getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
|
@ -238,6 +243,7 @@ class DefaultWorkloadApiClientTest {
|
|||
assertEquals(SpiffeId.parse("spiffe://example.org/test"), jwtSvid.getSpiffeId());
|
||||
assertTrue(jwtSvid.getAudience().contains("aud1"));
|
||||
assertEquals(3, jwtSvid.getAudience().size());
|
||||
assertEquals("external", jwtSvid.getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
|
@ -279,6 +285,77 @@ class DefaultWorkloadApiClientTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids() {
|
||||
try {
|
||||
List<JwtSvid> jwtSvids = workloadApiClient.fetchJwtSvids("aud1", "aud2", "aud3");
|
||||
System.out.println(jwtSvids.toString());
|
||||
assertNotNull(jwtSvids);
|
||||
assertEquals(jwtSvids.size(), 2);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), jwtSvids.get(0).getSpiffeId());
|
||||
assertTrue(jwtSvids.get(0).getAudience().contains("aud1"));
|
||||
assertEquals(3, jwtSvids.get(0).getAudience().size());
|
||||
assertEquals("external", jwtSvids.get(0).getHint());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/extra-workload-server"), jwtSvids.get(1).getSpiffeId());
|
||||
assertTrue(jwtSvids.get(1).getAudience().contains("aud1"));
|
||||
assertEquals(3, jwtSvids.get(1).getAudience().size());
|
||||
assertEquals("", jwtSvids.get(1).getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvidsPassingSpiffeId() {
|
||||
try {
|
||||
List<JwtSvid> jwtSvids = workloadApiClient.fetchJwtSvids(SpiffeId.parse("spiffe://example.org/test"), "aud1", "aud2", "aud3");
|
||||
assertNotNull(jwtSvids);
|
||||
assertEquals(jwtSvids.size(), 1);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/test"), jwtSvids.get(0).getSpiffeId());
|
||||
assertTrue(jwtSvids.get(0).getAudience().contains("aud1"));
|
||||
assertEquals(3, jwtSvids.get(0).getAudience().size());
|
||||
assertEquals("external", jwtSvids.get(0).getHint());
|
||||
} catch (JwtSvidException e) {
|
||||
fail(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_nullAudience() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvid(null, new String[]{"aud2", "aud3"});
|
||||
fail();
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("audience is marked non-null but is null", e.getMessage());
|
||||
} catch (JwtSvidException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_withSpiffeIdAndNullAudience() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvid(SpiffeId.parse("spiffe://example.org/text"), null, "aud2", "aud3");
|
||||
fail();
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("audience is marked non-null but is null", e.getMessage());
|
||||
} catch (JwtSvidException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchJwtSvids_nullSpiffeId() {
|
||||
try {
|
||||
workloadApiClient.fetchJwtSvid(null, "aud1", new String[]{"aud2", "aud3"});
|
||||
fail();
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("subject is marked non-null but is null", e.getMessage());
|
||||
} catch (JwtSvidException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testValidateJwtSvid() {
|
||||
String token = generateToken("spiffe://example.org/workload-server", Collections.singletonList("aud1"));
|
||||
|
@ -348,6 +425,7 @@ class DefaultWorkloadApiClientTest {
|
|||
done.countDown();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Throwable e) {
|
||||
}
|
||||
|
|
|
@ -7,11 +7,11 @@ import io.spiffe.exception.X509SourceException;
|
|||
import io.spiffe.spiffeid.SpiffeId;
|
||||
import io.spiffe.spiffeid.TrustDomain;
|
||||
import io.spiffe.svid.x509svid.X509Svid;
|
||||
import io.spiffe.utils.TestUtils;
|
||||
import lombok.val;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
|
||||
|
||||
import java.time.Duration;
|
||||
|
||||
|
@ -82,7 +82,8 @@ class DefaultX509SourceTest {
|
|||
void testGetX509Svid() {
|
||||
X509Svid x509Svid = x509Source.getX509Svid();
|
||||
assertNotNull(x509Svid);
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"),x509Svid.getSpiffeId());
|
||||
assertEquals(SpiffeId.parse("spiffe://example.org/workload-server"), x509Svid.getSpiffeId());
|
||||
assertEquals("internal", x509Svid.getHint());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -123,6 +124,7 @@ class DefaultX509SourceTest {
|
|||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void newSource_timeout() throws Exception {
|
||||
try {
|
||||
|
@@ -160,15 +162,15 @@ class DefaultX509SourceTest {

@Test
void newSource_noSocketAddress() throws Exception {
try {
// just in case the variable is defined in the environment
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "");
DefaultX509Source.newSource();
fail();
} catch (X509SourceException | SocketEndpointAddressException e) {
fail();
} catch (IllegalStateException e) {
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
}
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
try {
DefaultX509Source.newSource();
fail();
} catch (X509SourceException | SocketEndpointAddressException e) {
fail();
} catch (IllegalStateException e) {
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
}
});
}
}
@@ -66,11 +66,23 @@ class FakeWorkloadApi extends SpiffeWorkloadAPIImplBase {
.setX509Svid(svidByteString)
.setX509SvidKey(keyByteString)
.setBundle(bundleByteString)
.setHint("external")
.build();

// This X.509-SVID should be filtered out by the client because it has a non-unique hint and is not the first X.509-SVID in the response with this hint.
Workload.X509SVID skippedSVID = Workload.X509SVID
.newBuilder()
.setSpiffeId("spiffe://example.org/this0-should-be-filtered-out")
.setX509Svid(svidByteString)
.setX509SvidKey(keyByteString)
.setBundle(bundleByteString)
.setHint("external")
.build();

Workload.X509SVIDResponse response = Workload.X509SVIDResponse
.newBuilder()
.addSvids(svid)
.addSvids(skippedSVID)
.putFederatedBundles(TrustDomain.parse("domain.test").getName(), federatedByteString)
.build();
@@ -108,25 +120,59 @@ class FakeWorkloadApi extends SpiffeWorkloadAPIImplBase {
@Override
public void fetchJWTSVID(Workload.JWTSVIDRequest request, StreamObserver<Workload.JWTSVIDResponse> responseObserver) {
String spiffeId = request.getSpiffeId();
String extraSpiffeId = "spiffe://example.org/extra-workload-server";
String skippedSpiffeId = "spiffe://example.org/this-should-be-filtered-out";
boolean firstOnly = true;
if (StringUtils.isBlank(spiffeId)) {
firstOnly = false;
spiffeId = "spiffe://example.org/workload-server";
}
Date expiration = new Date(System.currentTimeMillis() + 3600000);

Map<String, Object> claims = new HashMap<>();
claims.put("sub", spiffeId);
claims.put("aud", getAudienceList(request.getAudienceList()));
Date expiration = new Date(System.currentTimeMillis() + 3600000);
claims.put("exp", expiration);

Map<String, Object> extraClaims = new HashMap<>();
extraClaims.put("sub", extraSpiffeId);
extraClaims.put("aud", getAudienceList(request.getAudienceList()));
extraClaims.put("exp", expiration);

KeyPair keyPair = TestUtils.generateECKeyPair(Curve.P_521);

String token = TestUtils.generateToken(claims, keyPair, "authority1");
String extraToken = TestUtils.generateToken(extraClaims, keyPair, "authority1");

Workload.JWTSVID jwtsvid = Workload.JWTSVID
.newBuilder()
.setSpiffeId(spiffeId)
.setSvid(token)
.setHint("external")
.build();
Workload.JWTSVIDResponse response = Workload.JWTSVIDResponse.newBuilder().addSvids(jwtsvid).build();

Workload.JWTSVID extraJwtsvid = Workload.JWTSVID
.newBuilder()
.setSpiffeId(extraSpiffeId)
.setSvid(extraToken)
.build();

// This JWT-SVID should be filtered out by the client because it has a non-unique hint and is not the first JWT-SVID in the response with this hint.
Workload.JWTSVID skippedJWTSVID = Workload.JWTSVID
.newBuilder()
.setSpiffeId(skippedSpiffeId)
.setSvid(extraToken)
.setHint("external")
.build();

Workload.JWTSVIDResponse.Builder builder = Workload.JWTSVIDResponse.newBuilder();
builder.addSvids(jwtsvid);
builder.addSvids(skippedJWTSVID);
if (!firstOnly) {
builder.addSvids(extraJwtsvid);
}

Workload.JWTSVIDResponse response = builder.build();
responseObserver.onNext(response);
responseObserver.onCompleted();
}
@@ -11,6 +11,7 @@ import io.spiffe.svid.jwtsvid.JwtSvid;
import lombok.NonNull;

import java.io.IOException;
import java.util.List;

public class WorkloadApiClientErrorStub implements WorkloadApiClient {

@@ -44,6 +45,16 @@ public class WorkloadApiClientErrorStub implements WorkloadApiClient {
throw new JwtSvidException("Testing exception");
}

@Override
public List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException {
throw new JwtSvidException("Testing exception");
}

@Override
public List<JwtSvid> fetchJwtSvids(@NonNull SpiffeId subject, @NonNull String audience, String... extraAudience) throws JwtSvidException {
throw new JwtSvidException("Testing exception");
}

@Override
public JwtBundleSet fetchJwtBundles() throws JwtBundleException {
throw new JwtBundleException("Testing exception");
@ -23,27 +23,28 @@ import java.nio.file.Files;
|
|||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.KeyPair;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.time.Clock;
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
|
||||
import static io.spiffe.utils.TestUtils.toUri;
|
||||
|
||||
public class WorkloadApiClientStub implements WorkloadApiClient {
|
||||
|
||||
static final Duration JWT_TTL = Duration.ofSeconds(60);
|
||||
final String privateKey = "testdata/workloadapi/svid.key.der";
|
||||
final String svid = "testdata/workloadapi/svid.der";
|
||||
final String x509Bundle = "testdata/workloadapi/bundle.der";
|
||||
final String jwtBundle = "testdata/workloadapi/bundle.json";
|
||||
final SpiffeId subject = SpiffeId.parse("spiffe://example.org/workload-server");
|
||||
final SpiffeId extraSubject = SpiffeId.parse("spiffe://example.org/extra-workload-server");
|
||||
|
||||
int fetchJwtSvidCallCount = 0;
|
||||
|
||||
boolean closed;
|
||||
|
||||
Clock clock = Clock.systemDefaultZone();
|
||||
|
||||
@Override
|
||||
public X509Context fetchX509Context() {
|
||||
return generateX509Context();
|
||||
|
@ -68,14 +69,33 @@ public class WorkloadApiClientStub implements WorkloadApiClient {
|
|||
|
||||
@Override
|
||||
public JwtSvid fetchJwtSvid(@NonNull final String audience, final String... extraAudience) throws JwtSvidException {
|
||||
fetchJwtSvidCallCount++;
|
||||
return generateJwtSvid(subject, audience, extraAudience);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JwtSvid fetchJwtSvid(@NonNull final SpiffeId subject, @NonNull final String audience, final String... extraAudience) throws JwtSvidException {
|
||||
fetchJwtSvidCallCount++;
|
||||
return generateJwtSvid(subject, audience, extraAudience);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException {
|
||||
fetchJwtSvidCallCount++;
|
||||
List<JwtSvid> svids = new ArrayList<>();
|
||||
svids.add(generateJwtSvid(subject, audience, extraAudience));
|
||||
svids.add(generateJwtSvid(extraSubject, audience, extraAudience));
|
||||
return svids;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<JwtSvid> fetchJwtSvids(@NonNull SpiffeId subject, @NonNull String audience, String... extraAudience) throws JwtSvidException {
|
||||
fetchJwtSvidCallCount++;
|
||||
List<JwtSvid> svids = new ArrayList<>();
|
||||
svids.add(generateJwtSvid(subject, audience, extraAudience));
|
||||
return svids;
|
||||
}
|
||||
|
||||
@Override
|
||||
public JwtBundleSet fetchJwtBundles() throws JwtBundleException {
|
||||
return generateJwtBundleSet();
|
||||
|
@ -123,14 +143,15 @@ public class WorkloadApiClientStub implements WorkloadApiClient {
|
|||
Map<String, Object> claims = new HashMap<>();
|
||||
claims.put("sub", subject.toString());
|
||||
claims.put("aud", new ArrayList<>(audParam));
|
||||
Date expiration = new Date(System.currentTimeMillis() + 3600000);
|
||||
claims.put("exp", expiration);
|
||||
|
||||
claims.put("iat", new Date(clock.millis()));
|
||||
claims.put("exp", new Date(clock.millis() + JWT_TTL.toMillis()));
|
||||
|
||||
KeyPair keyPair = TestUtils.generateECKeyPair(Curve.P_521);
|
||||
|
||||
String token = TestUtils.generateToken(claims, keyPair, "authority1");
|
||||
|
||||
return JwtSvid.parseInsecure(token, audParam);
|
||||
return JwtSvid.parseInsecure(token, audParam, "external");
|
||||
}
|
||||
|
||||
|
||||
|
@ -164,9 +185,25 @@ public class WorkloadApiClientStub implements WorkloadApiClient {
|
|||
Path pathKey = Paths.get(toUri(privateKey));
|
||||
byte[] keyBytes = Files.readAllBytes(pathKey);
|
||||
|
||||
return X509Svid.parseRaw(svidBytes, keyBytes);
|
||||
return X509Svid.parseRaw(svidBytes, keyBytes, "internal");
|
||||
} catch (X509SvidException | IOException | URISyntaxException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
void resetFetchJwtSvidCallCount() {
|
||||
fetchJwtSvidCallCount = 0;
|
||||
}
|
||||
|
||||
int getFetchJwtSvidCallCount() {
|
||||
return fetchJwtSvidCallCount;
|
||||
}
|
||||
|
||||
Clock getClock() {
|
||||
return clock;
|
||||
}
|
||||
|
||||
void setClock(Clock clock) {
|
||||
this.clock = clock;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -11,8 +11,7 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.*;

class RetryHandlerTest {

@@ -78,7 +77,8 @@ class RetryHandlerTest {

// fourth retry exceeds max retries
retryHandler.scheduleRetry(runnable);
verifyNoInteractions(scheduledExecutorService);
verify(scheduledExecutorService).isShutdown();
verifyNoMoreInteractions(scheduledExecutorService);
}

@Test
@ -101,43 +101,13 @@ public class TestUtils {
|
|||
public static JWTClaimsSet buildJWTClaimSetFromClaimsMap(Map<String, Object> claims) {
|
||||
return new JWTClaimsSet.Builder()
|
||||
.subject((String) claims.get("sub"))
|
||||
.issueTime((Date) claims.get("iat"))
|
||||
.expirationTime((Date) claims.get("exp"))
|
||||
.audience((List<String>) claims.get("aud"))
|
||||
.build();
|
||||
}
|
||||
|
||||
public static void setEnvironmentVariable(String variableName, String value) throws Exception {
|
||||
Class<?> processEnvironment = Class.forName("java.lang.ProcessEnvironment");
|
||||
|
||||
Field unmodifiableMapField = getField(processEnvironment, "theUnmodifiableEnvironment");
|
||||
Object unmodifiableMap = unmodifiableMapField.get(null);
|
||||
injectIntoUnmodifiableMap(variableName, value, unmodifiableMap);
|
||||
|
||||
Field mapField = getField(processEnvironment, "theEnvironment");
|
||||
Map<String, String> map = (Map<String, String>) mapField.get(null);
|
||||
map.put(variableName, value);
|
||||
}
|
||||
|
||||
public static Object invokeMethod(Class<?> clazz, String methodName, Object... args) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
|
||||
Method method = clazz.getDeclaredMethod(methodName);
|
||||
method.setAccessible(true);
|
||||
return method.invoke(args);
|
||||
}
|
||||
|
||||
public static Field getField(Class<?> clazz, String fieldName) throws NoSuchFieldException {
|
||||
Field field = clazz.getDeclaredField(fieldName);
|
||||
field.setAccessible(true);
|
||||
return field;
|
||||
}
|
||||
|
||||
public static URI toUri(String path) throws URISyntaxException {
|
||||
return Thread.currentThread().getContextClassLoader().getResource(path).toURI();
|
||||
}
|
||||
|
||||
private static void injectIntoUnmodifiableMap(String key, String value, Object map) throws ReflectiveOperationException {
|
||||
Class unmodifiableMap = Class.forName("java.util.Collections$UnmodifiableMap");
|
||||
Field field = getField(unmodifiableMap, "m");
|
||||
Object obj = field.get(map);
|
||||
((Map<String, String>) obj).put(key, value);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -10,15 +10,14 @@ The Helper automatically gets the SVID updates and stores them in the KeyStore a

On Linux:

`java -jar java-spiffe-helper-0.7.0-linux-x86_64.jar -c helper.conf`
`java -jar java-spiffe-helper-0.8.12-linux-x86_64.jar`

On Mac OS:

`java -jar java-spiffe-helper-0.7.0-osx-x86_64.jar -c helper.conf`
`java -jar java-spiffe-helper-0.8.12-osx-x86_64.jar`

Either `-c` or `--config` should be used to pass the path to the config file.

(The jar can be downloaded from [Github releases](https://github.com/spiffe/java-spiffe/releases/tag/v0.7.0))
You can run the utility with the `-c` or `--config` option to specify the path to the configuration file. By default, it
will look for a configuration file named `conf/java-spiffe-helper.properties` in the current working directory.

## Config file

@@ -39,20 +38,19 @@ spiffeSocketPath = unix:/tmp/agent.sock

### Configuration Properties

|Configuration | Description | Default value |
|------------------|--------------------------------------------------------------------------------| ------------- |
|`keyStorePath` | Path to the Java KeyStore File for storing the Private Key and chain of certs | none |
|`keyStorePass` | Password to protect the Java KeyStore File | none |
|`keyPass` | Password to protect the Private Key entry in the KeyStore | none |
|`trustStorePath` | Path to the Java TrustStore File for storing the trusted bundles | none |
|`trustStorePass` | Password to protect the Java TrustStore File | none |
|`keyStoreType` | Java KeyStore Type. (`pkcs12` and `jks` are supported). Case insensitive. | pkcs12 |
|`keyAlias` | Alias for the Private Key entry | spiffe |
|`spiffeSocketPath`| Path the Workload API | Read from the system variable: SPIFFE_ENDPOINT_SOCKET |

KeyStore and TrustStore **must** be in separate files. If `keyStorePath` and `trustStorePath` points to the same file, an error
is shown
.
| Configuration | Description | Default value |
|--------------------|-------------------------------------------------------------------------------|-------------------------------------------------------|
| `keyStorePath` | Path to the Java KeyStore File for storing the Private Key and chain of certs | none |
| `keyStorePass` | Password to protect the Java KeyStore File | none |
| `keyPass` | Password to protect the Private Key entry in the KeyStore | none |
| `trustStorePath` | Path to the Java TrustStore File for storing the trusted bundles | none |
| `trustStorePass` | Password to protect the Java TrustStore File | none |
| `keyStoreType` | Java KeyStore Type. (`pkcs12` and `jks` are supported). Case insensitive. | pkcs12 |
| `keyAlias` | Alias for the Private Key entry | spiffe |
| `spiffeSocketPath` | Path the Workload API | Read from the system variable: SPIFFE_ENDPOINT_SOCKET |

KeyStore and TrustStore **must** be in separate files. If `keyStorePath` and `trustStorePath` points to the same file,
an error is shown.
If the store files do not exist, they are created.

The default and **recommended KeyStore Type** is `PKCS12`. The same type is used for both KeyStore and TrustStore.
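For illustration only, a minimal `conf/java-spiffe-helper.properties` covering the properties in the table above might look like the following sketch (paths and passwords are hypothetical placeholders; `spiffeSocketPath` reuses the example value from the config snippet above):

```
keyStorePath = /opt/java-spiffe-helper/keystore.p12
keyStorePass = keystore-password
keyPass = key-password
trustStorePath = /opt/java-spiffe-helper/truststore.p12
trustStorePass = truststore-password
keyStoreType = pkcs12
keyAlias = spiffe
spiffeSocketPath = unix:/tmp/agent.sock
```

With such a file in place, the helper could be started as `java -jar java-spiffe-helper-0.8.12-linux-x86_64.jar -c conf/java-spiffe-helper.properties`, or with no `-c` option at all if the file sits at the default location.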
@@ -1,5 +1,5 @@
plugins {
id "com.github.johnrengelman.shadow" version "5.2.0"
id "com.github.johnrengelman.shadow" version "${shadowVersion}"
}

description = "Java SPIFFE Library Helper module to store X.509 SVIDs and Bundles in a Java KeyStore in disk"
@@ -9,7 +9,8 @@ apply plugin: 'com.github.johnrengelman.shadow'
assemble.dependsOn shadowJar

shadowJar {
archiveClassifier = osdetector.classifier
mergeServiceFiles()
archiveClassifier = project.hasProperty('archiveClassifier') && project.archiveClassifier != "" ? project.archiveClassifier : osdetector.classifier
manifest {
attributes 'Main-Class': 'io.spiffe.helper.cli.Runner'
}
@@ -20,12 +21,19 @@ dependencies {

// runtimeOnly grpc-netty dependency module will be included in the shadowJar
if (osdetector.os.is('osx') ) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos'))
project.ext.osArch = System.getProperty("os.arch")
if ("x86_64" == project.ext.osArch) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos'))
} else if ("aarch64" == project.ext.osArch) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos-aarch64'))
} else {
throw new GradleException("Architecture not supported: " + project.ext.osArch)
}
} else {
runtimeOnly(project(':java-spiffe-core:grpc-netty-linux'))
}

implementation group: 'commons-cli', name: 'commons-cli', version: '1.4'
implementation group: 'commons-cli', name: 'commons-cli', version: '1.9.0'

testImplementation(testFixtures(project(":java-spiffe-core")))
}
@@ -0,0 +1 @@
archiveClassifier=
@@ -4,11 +4,7 @@ import io.spiffe.helper.exception.RunnerException;
import io.spiffe.helper.keystore.KeyStoreHelper.KeyStoreOptions;
import io.spiffe.helper.keystore.KeyStoreType;
import lombok.val;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.*;
import org.apache.commons.lang3.StringUtils;

import java.io.IOException;
@@ -16,17 +12,18 @@ import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidParameterException;
import java.util.Properties;

class Config {

private static final String DEFAULT_CONFIG_FILENAME = "conf/java-spiffe-helper.properties";

static final Option CONFIG_FILE_OPTION =
Option.builder("c")
.longOpt("config")
.hasArg(true)
.required(true)
.build();
.longOpt("config")
.hasArg(true)
.required(false)
.build();

private Config() {
}
@@ -42,17 +39,17 @@ class Config {
return properties;
}

static String getCliConfigOption(final String... args) throws RunnerException {
static String getCliConfigOption(final String... args) throws ParseException {
final Options cliOptions = new Options();
cliOptions.addOption(CONFIG_FILE_OPTION);
CommandLineParser parser = new DefaultParser();
try {
val cmd = parser.parse(cliOptions, args);
return cmd.getOptionValue("config");
} catch (ParseException e) {
val error = String.format("%s. Use -c, --config <arg>", e.getMessage());
throw new RunnerException(error);
}

CommandLine cmd = parser.parse(cliOptions, args);
return cmd.getOptionValue("config", getDefaultConfigPath());
}

private static String getDefaultConfigPath() {
return Paths.get(System.getProperty("user.dir"), DEFAULT_CONFIG_FILENAME).toString();
}

static KeyStoreOptions createKeyStoreOptions(final Properties properties) {
@@ -89,7 +86,7 @@ class Config {
static String getProperty(final Properties properties, final String key) {
final String value = properties.getProperty(key);
if (StringUtils.isBlank(value)) {
throw new InvalidParameterException(String.format("Missing value for config property: %s", key));
throw new IllegalArgumentException(String.format("Missing value for config property: %s", key));
}
return value;
}
@@ -6,13 +6,14 @@ import io.spiffe.helper.exception.RunnerException;
import io.spiffe.helper.keystore.KeyStoreHelper;
import lombok.extern.java.Log;
import lombok.val;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.exception.ExceptionUtils;

import java.nio.file.Paths;
import java.security.InvalidParameterException;
import java.security.KeyStoreException;

/**
* Entry point of the CLI to run the KeyStoreHelper.
* Entry point of the java-spiffe-helper CLI application.
*/
@Log
public class Runner {
@@ -20,15 +21,19 @@ public class Runner {
private Runner() {
}

/**
* Entry method of the CLI to run the {@link KeyStoreHelper}.
* <p>
* In the args needs to be passed the config file option as: "-c" and "path_to_config_file"
*
* @param args contains the option with the config file path
* @throws RunnerException is there is an error configuring or creating the KeyStoreHelper.
*/
public static void main(final String ...args) throws RunnerException {
public static void main(final String... args) {
try {
runApplication(args);
} catch (RunnerException e) {
log.severe(ExceptionUtils.getStackTrace(e));
System.exit(1);
} catch (ParseException | IllegalArgumentException e) {
log.severe(e.getMessage());
System.exit(1);
}
}

static void runApplication(final String... args) throws RunnerException, ParseException {
try {
val configFilePath = Config.getCliConfigOption(args);
val properties = Config.parseConfigFileProperties(Paths.get(configFilePath));
@@ -36,8 +41,7 @@ public class Runner {
try (val keyStoreHelper = KeyStoreHelper.create(options)) {
keyStoreHelper.run(true);
}
} catch (SocketEndpointAddressException | KeyStoreHelperException | RunnerException | InvalidParameterException | KeyStoreException e) {
log.severe(e.getMessage());
} catch (SocketEndpointAddressException | KeyStoreHelperException | KeyStoreException e) {
throw new RunnerException(e);
}
}
@@ -44,23 +44,29 @@ class KeyStore {

private java.security.KeyStore loadKeyStore() throws KeyStoreException {
try {
val keyStore = java.security.KeyStore.getInstance(keyStoreType.value());

// Initialize KeyStore
if (Files.exists(keyStoreFilePath)) {
try (final InputStream inputStream = Files.newInputStream(keyStoreFilePath)) {
keyStore.load(inputStream, keyStorePassword.toCharArray());
}
} else {
//create new keyStore
keyStore.load(null, keyStorePassword.toCharArray());
}
return keyStore;
return loadKeyStoreFromFile();
} catch (IOException | NoSuchAlgorithmException | CertificateException e) {
throw new KeyStoreException("KeyStore cannot be created", e);
}
}

private java.security.KeyStore loadKeyStoreFromFile() throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
val keyStore = java.security.KeyStore.getInstance(keyStoreType.value());

// Initialize KeyStore
if (Files.exists(keyStoreFilePath)) {
try (final InputStream inputStream = Files.newInputStream(keyStoreFilePath)) {
keyStore.load(inputStream, keyStorePassword.toCharArray());
} catch (IOException e) {
throw new KeyStoreException("KeyStore cannot be opened", e);
}
} else {
// Create a new KeyStore if it doesn't exist
keyStore.load(null, keyStorePassword.toCharArray());
}
return keyStore;
}


/**
* Store a private key and X.509 certificate chain in a Java KeyStore
@ -4,6 +4,7 @@ import io.spiffe.helper.exception.RunnerException;
|
|||
import io.spiffe.helper.keystore.KeyStoreHelper;
|
||||
import io.spiffe.helper.keystore.KeyStoreType;
|
||||
import lombok.val;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.RandomStringUtils;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
|
@ -12,8 +13,7 @@ import java.nio.file.Paths;
|
|||
import java.util.Properties;
|
||||
|
||||
import static io.spiffe.utils.TestUtils.toUri;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
class ConfigTest {
|
||||
|
||||
|
@ -56,7 +56,7 @@ class ConfigTest {
|
|||
try {
|
||||
String option = Config.getCliConfigOption("-c", "test");
|
||||
assertEquals("test", option);
|
||||
} catch (RunnerException e) {
|
||||
} catch (ParseException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
@ -66,27 +66,28 @@ class ConfigTest {
|
|||
try {
|
||||
String option = Config.getCliConfigOption("--config", "example");
|
||||
assertEquals("example", option);
|
||||
} catch (RunnerException e) {
|
||||
} catch (ParseException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void getCliConfigOption_unknownOption() {
|
||||
try {
|
||||
String option = Config.getCliConfigOption("-a", "test");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Unrecognized option: -a. Use -c, --config <arg>", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetCliConfigOption_unknownLongOption() {
|
||||
try {
|
||||
Config.getCliConfigOption("--unknown", "example");
|
||||
fail("expected parse exception");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Unrecognized option: --unknown. Use -c, --config <arg>", e.getMessage());
|
||||
} catch (ParseException e) {
|
||||
assertTrue(e.getMessage().startsWith("Unrecognized option: --unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void getCliConfigOption_unknownOption() {
|
||||
try {
|
||||
String option = Config.getCliConfigOption("-a", "test");
|
||||
fail("expected parse exception");
|
||||
} catch (ParseException e) {
|
||||
assertTrue(e.getMessage().startsWith("Unrecognized option: -a"));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@ package io.spiffe.helper.cli;
|
|||
|
||||
import io.spiffe.helper.exception.RunnerException;
|
||||
import lombok.val;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.File;
|
||||
|
@ -17,70 +18,70 @@ import static org.junit.jupiter.api.Assertions.fail;
|
|||
class RunnerTest {
|
||||
|
||||
@Test
|
||||
void test_Main_KeyStorePathIsMissing() throws URISyntaxException {
|
||||
void test_Main_KeyStorePathIsMissing() throws URISyntaxException, RunnerException, ParseException {
|
||||
final Path path = Paths.get(toUri("testdata/cli/missing-keystorepath.conf"));
|
||||
try {
|
||||
Runner.main("-c", path.toString());
|
||||
Runner.runApplication("-c", path.toString());
|
||||
fail("expected exception: property is missing");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Missing value for config property: keyStorePath", e.getCause().getMessage());
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Missing value for config property: keyStorePath", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_Main_KeyStorePassIsMissing() throws URISyntaxException {
|
||||
void test_Main_KeyStorePassIsMissing() throws URISyntaxException, RunnerException, ParseException {
|
||||
final Path path = Paths.get(toUri("testdata/cli/missing-keystorepass.conf"));
|
||||
try {
|
||||
Runner.main("-c", path.toString());
|
||||
Runner.runApplication("-c", path.toString());
|
||||
fail("expected exception: property is missing");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Missing value for config property: keyStorePass", e.getCause().getMessage());
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Missing value for config property: keyStorePass", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_Main_KeyPassIsMissing() throws URISyntaxException {
|
||||
void test_Main_KeyPassIsMissing() throws URISyntaxException, RunnerException, ParseException {
|
||||
final Path path = Paths.get(toUri("testdata/cli/missing-keypass.conf"));
|
||||
try {
|
||||
Runner.main("-c", path.toString());
|
||||
Runner.runApplication("-c", path.toString());
|
||||
fail("expected exception: property is missing");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Missing value for config property: keyPass", e.getCause().getMessage());
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Missing value for config property: keyPass", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_Main_TrustStorePathIsMissing() throws URISyntaxException {
|
||||
void test_Main_TrustStorePathIsMissing() throws URISyntaxException, RunnerException, ParseException {
|
||||
final Path path = Paths.get(toUri("testdata/cli/missing-truststorepath.conf"));
|
||||
try {
|
||||
Runner.main("-c", path.toString());
|
||||
Runner.runApplication("-c", path.toString());
|
||||
fail("expected exception: property is missing");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Missing value for config property: trustStorePath", e.getCause().getMessage());
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Missing value for config property: trustStorePath", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_Main_TrustStorePassIsMissing() throws URISyntaxException {
|
||||
void test_Main_TrustStorePassIsMissing() throws URISyntaxException, RunnerException, ParseException {
|
||||
final Path path = Paths.get(toUri("testdata/cli/missing-truststorepass.conf"));
|
||||
try {
|
||||
Runner.main("-c", path.toString());
|
||||
Runner.runApplication("-c", path.toString());
|
||||
fail("expected exception: property is missing");
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("Missing value for config property: trustStorePass", e.getCause().getMessage());
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Missing value for config property: trustStorePass", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_Main_throwsExceptionIfTheKeystoreCannotBeCreated() throws URISyntaxException, IOException {
|
||||
void test_Main_throwsExceptionIfTheKeystoreCannotBeCreated() throws URISyntaxException, IOException, ParseException {
|
||||
val file = new File("keystore123.p12");
|
||||
file.createNewFile();
|
||||
|
||||
val configPath = Paths.get(toUri("testdata/cli/correct.conf"));
|
||||
try {
|
||||
Runner.main("-c", configPath.toString());
|
||||
Runner.runApplication("-c", configPath.toString());
|
||||
} catch (RunnerException e) {
|
||||
assertEquals("KeyStore cannot be created", e.getCause().getMessage());
|
||||
assertEquals("KeyStore cannot be opened", e.getCause().getMessage());
|
||||
} finally {
|
||||
file.delete();
|
||||
}
|
||||
|
|
|
@@ -13,6 +13,7 @@ import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;

import java.io.IOException;
import java.nio.file.Files;
@@ -190,13 +191,14 @@ class KeyStoreHelperTest {

@Test
void testCreateKeyStoreHelper_createNewClient() throws Exception {
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test");
val options = getKeyStoreValidOptions(null);
try {
KeyStoreHelper.create(options);
} catch (KeyStoreHelperException e) {
fail();
}
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "unix:/tmp/test").execute(() -> {
val options = getKeyStoreValidOptions(null);
try {
KeyStoreHelper.create(options);
} catch (KeyStoreHelperException e) {
fail();
}
});
}

@Test
@@ -203,7 +203,7 @@ public class KeyStoreTest {
.keyStorePassword("example")
.build();
} catch (KeyStoreException e) {
assertEquals("KeyStore cannot be created", e.getMessage());
assertEquals("KeyStore cannot be opened", e.getMessage());
} finally {
file.delete();
}
@@ -13,6 +13,7 @@ import io.spiffe.workloadapi.WorkloadApiClient;
import io.spiffe.workloadapi.X509Context;
import lombok.NonNull;

import java.util.List;
import java.io.IOException;

public class WorkloadApiClientErrorStub implements WorkloadApiClient {
@@ -46,7 +47,15 @@ public class WorkloadApiClientErrorStub implements WorkloadApiClient {
public JwtSvid fetchJwtSvid(@NonNull final SpiffeId subject, @NonNull final String audience, final String... extraAudience) throws JwtSvidException {
throw new JwtSvidException("Testing exception");
}
@Override
public List<JwtSvid> fetchJwtSvids(@NonNull final String audience, final String... extraAudience) throws JwtSvidException {
throw new JwtSvidException("Testing exception");
}

@Override
public List<JwtSvid> fetchJwtSvids(@NonNull final SpiffeId subject, @NonNull final String audience, final String... extraAudience) throws JwtSvidException {
throw new JwtSvidException("Testing exception");
}
@Override
public JwtBundleSet fetchJwtBundles() throws JwtBundleException {
throw new JwtBundleException("Testing exception");
@@ -24,6 +24,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Collections;

public class WorkloadApiClientStub implements WorkloadApiClient {
@@ -64,6 +65,15 @@ public class WorkloadApiClientStub implements WorkloadApiClient {
return null;
}

@Override
public List<JwtSvid> fetchJwtSvids(@NonNull String audience, String... extraAudience) throws JwtSvidException {
return null;
}

@Override
public List<JwtSvid> fetchJwtSvids(@NonNull final SpiffeId subject, @NonNull final String audience, final String... extraAudience) throws JwtSvidException {
return null;
}
@Override
public JwtBundleSet fetchJwtBundles() throws JwtBundleException {
return null;
@@ -137,6 +137,36 @@ export SPIFFE_ENDPOINT_SOCKET=/tmp/agent.sock

## Use Cases

### Connect to Postgres DB using TLS and the SPIFFE SslSocketFactory

A Java app can connect to a Postgres DB using TLS and authenticate itself using certificates provided by SPIRE through
the SPIFFE Workload API. To enable this functionality, there's a custom `SSLSocketFactory` implementation that injects a
custom `SSLContext` that uses the SPIFFE `KeyStore` and `TrustStore` implementations to obtain certificates and bundles
from a SPIRE Agent, keep them updated in memory, and provide them for TLS connections.

The URL to connect to Postgres using TLS and Java SPIFFE is as follows:

```
jdbc:postgresql://localhost:5432/postgres?sslmode=require&sslfactory=io.spiffe.provider.SpiffeSslSocketFactory&sslNegotiation=direct
```

The parameter `sslfactory` in the URL configures the Postgres JDBC driver to use the `SpiffeSslSocketFactory`, which wraps
an SSL Socket with the Java SPIFFE functionality. The additional parameter `sslNegotiation` is needed to instantiate
`SpiffeSslSocketFactory` correctly.

The Workload API socket endpoint should be configured through the Environment variable `SPIFFE_ENDPOINT_SOCKET`.

During the connection to a Postgres DB, the server presents its certificate, which is validated using trust bundles
obtained from the SPIFFE Workload API.
To also validate that the SPIFFE ID presented in the server's certificate is one of a list of expected SPIFFE IDs,
the property `ssl.spiffe.accept` needs to be configured with the expected SPIFFE IDs separated by commas.
For example:

```
-Dssl.spiffe.accept=spiffe://domain.test/db-1,spiffe://domain.test/db-2
```
If this property is not configured, any SPIFFE ID will be accepted in a TLS connection.
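As a rough sketch of what this looks like from application code (host, database, and credentials below are hypothetical placeholders; the Java SPIFFE-specific pieces are only the JDBC URL parameters shown above, the `SPIFFE_ENDPOINT_SOCKET` environment variable, and the optional `ssl.spiffe.accept` property):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class SpiffePostgresExample {

    public static void main(String[] args) throws Exception {
        // Optional: restrict which SPIFFE IDs are accepted from the DB server's certificate.
        System.setProperty("ssl.spiffe.accept", "spiffe://domain.test/db-1,spiffe://domain.test/db-2");

        // sslfactory points the Postgres JDBC driver at the SPIFFE-aware socket factory;
        // the certificates and trust bundles come from the Workload API at SPIFFE_ENDPOINT_SOCKET.
        String url = "jdbc:postgresql://localhost:5432/postgres"
                + "?sslmode=require"
                + "&sslfactory=io.spiffe.provider.SpiffeSslSocketFactory"
                + "&sslNegotiation=direct";

        try (Connection connection = DriverManager.getConnection(url, "postgres", "secret");
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("SELECT 1")) {
            while (resultSet.next()) {
                System.out.println("Connected over SPIFFE mTLS, got: " + resultSet.getInt(1));
            }
        }
    }
}
```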
### Configure a Tomcat connector

***Prerequisite***: Having the SPIFFE Provider configured through the `java.security`.
@@ -1,5 +1,5 @@
plugins {
id "com.github.johnrengelman.shadow" version "5.2.0"
id "com.github.johnrengelman.shadow" version "${shadowVersion}"
}

description = "Java Security Provider implementation supporting X.509-SVIDs and methods for " +
@@ -10,6 +10,7 @@ apply plugin: 'com.github.johnrengelman.shadow'
assemble.dependsOn shadowJar

shadowJar {
mergeServiceFiles()
archiveClassifier = "all-".concat(osdetector.classifier)
}

@@ -18,7 +19,14 @@ dependencies {

// runtimeOnly grpc-netty dependency module will be included in the shadowJar
if (osdetector.os.is('osx') ) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos'))
project.ext.osArch = System.getProperty("os.arch")
if ("x86_64" == project.ext.osArch) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos'))
} else if ("aarch64" == project.ext.osArch) {
runtimeOnly(project(':java-spiffe-core:grpc-netty-macos-aarch64'))
} else {
throw new GradleException("Architecture not supported: " + project.ext.osArch)
}
} else {
runtimeOnly(project(':java-spiffe-core:grpc-netty-linux'))
}
@ -1,23 +1,77 @@
|
|||
package io.spiffe.provider;
|
||||
|
||||
import io.spiffe.exception.SocketEndpointAddressException;
|
||||
import io.spiffe.exception.X509SourceException;
|
||||
import io.spiffe.provider.SpiffeSslContextFactory.SslContextOptions;
|
||||
import io.spiffe.spiffeid.SpiffeId;
|
||||
import io.spiffe.spiffeid.SpiffeIdUtils;
|
||||
import io.spiffe.workloadapi.DefaultX509Source;
|
||||
import io.spiffe.workloadapi.X509Source;
|
||||
import lombok.extern.java.Log;
|
||||
import lombok.val;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
import java.io.IOException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.Socket;
|
||||
import java.security.KeyManagementException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.logging.Level;
|
||||
|
||||
import static io.spiffe.provider.SpiffeProviderConstants.SSL_SPIFFE_ACCEPT_PROPERTY;
|
||||
|
||||
/**
|
||||
* Implementation of {@link SSLSocketFactory} that provides methods to create {@link javax.net.ssl.SSLSocket}
|
||||
* backed by a SPIFFE SSLContext {@link SpiffeSslContextFactory}.
|
||||
*/
|
||||
@Log
|
||||
public class SpiffeSslSocketFactory extends SSLSocketFactory {
|
||||
|
||||
private final SSLSocketFactory delegate;
|
||||
|
||||
/**
|
||||
* Default Constructor.
|
||||
*
|
||||
* This SpiffeSslSocketFactory is backed by SPIFFE-aware SSLContext that obtains certificates
|
||||
* from the SPIFFE Workload API, connecting to a socket configured through the environment variable
|
||||
* 'SPIFFE_ENDPOINT_SOCKET'.
|
||||
*
|
||||
* The list of accepted SPIFFE IDs, that will be used to validate the SAN in a peer certificate,
|
||||
* can be configured through the property 'ssl.spiffe.accept', separating the SPIFFE IDs using commas
|
||||
* without spaces, e.g., '-Dssl.spiffe.accept=spiffe://domain.test/service,spiffe://example.org/app'
|
||||
* If the property is not set, any SPIFFE ID will be accepted in a TLS connection.
|
||||
*
|
||||
* @throws NoSuchAlgorithmException if there is a problem creating the SSL context
|
||||
* @throws KeyManagementException if there is a problem initializing the SSL context
|
||||
* @throws X509SourceException if there is a problem creating the source of X.509 certificates
|
||||
* @throws SocketEndpointAddressException if there is a problem connecting to the local SPIFFE socket
|
||||
*
|
||||
*/
|
||||
public SpiffeSslSocketFactory() throws SocketEndpointAddressException, X509SourceException, NoSuchAlgorithmException, KeyManagementException {
|
||||
log.log(Level.INFO, "Creating SpiffeSslSocketFactory");
|
||||
|
||||
SSLContext sslContext;
|
||||
Supplier<Set<SpiffeId>> acceptedSpiffeIds;
|
||||
SslContextOptions options;
|
||||
|
||||
X509Source x509source = DefaultX509Source.newSource();
|
||||
String envProperty = EnvironmentUtils.getProperty(SSL_SPIFFE_ACCEPT_PROPERTY);
|
||||
|
||||
if (StringUtils.isNotBlank(envProperty)) {
|
||||
acceptedSpiffeIds = () -> SpiffeIdUtils.toSetOfSpiffeIds(envProperty, ',');
|
||||
options = SslContextOptions.builder().acceptedSpiffeIdsSupplier(acceptedSpiffeIds).x509Source(x509source).build();
|
||||
} else {
|
||||
options = SslContextOptions.builder().acceptAnySpiffeId().x509Source(x509source).build();
|
||||
}
|
||||
|
||||
sslContext = SpiffeSslContextFactory.getSslContext(options);
|
||||
delegate = sslContext.getSocketFactory();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
|
|
|
@@ -38,11 +38,6 @@ public class SpiffeKeyManagerTest {
void setup() throws Exception {
MockitoAnnotations.initMocks(this);

val rootCa = createRootCA("C = US, O = SPIFFE", "spiffe://domain.test");
val leaf = createCertificate("C = US, O = SPIRE", "C = US, O = SPIRE", "spiffe://domain.test/workload", rootCa, false);

X509Svid svid = X509Svid.parseRaw(leaf.getCertificate().getEncoded(), leaf.getKeyPair().getPrivate().getEncoded());

x509Svid = X509Svid.load(
Paths.get(toUri("testdata/cert.pem")),
Paths.get(toUri("testdata/key.pem")));
@@ -4,6 +4,7 @@ import io.spiffe.utils.TestUtils;
import io.spiffe.workloadapi.Address;
import io.spiffe.workloadapi.X509Source;
import org.junit.jupiter.api.Test;
import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;

import java.lang.reflect.Field;

@@ -24,11 +25,12 @@ class X509SourceManagerTest {

@Test
void getX509Source_defaultAddressNotSet() throws Exception {
TestUtils.setEnvironmentVariable(Address.SOCKET_ENV_VARIABLE, "" );
try {
X509SourceManager.getX509Source();
} catch (IllegalStateException e) {
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
}
new EnvironmentVariables(Address.SOCKET_ENV_VARIABLE, "").execute(() -> {
try {
X509SourceManager.getX509Source();
} catch (IllegalStateException e) {
assertEquals("Endpoint Socket Address Environment Variable is not set: SPIFFE_ENDPOINT_SOCKET", e.getMessage());
}
});
}
}
@@ -4,3 +4,4 @@ include 'java-spiffe-provider'
include 'java-spiffe-helper'
include 'java-spiffe-core:grpc-netty-linux'
include 'java-spiffe-core:grpc-netty-macos'
include 'java-spiffe-core:grpc-netty-macos-aarch64'