Updated RBAC setup

Yinan Li 2018-01-22 12:52:41 -08:00
parent b6656a7dc5
commit 2ae5e86c09
6 changed files with 63 additions and 18 deletions

.gitignore

@@ -3,3 +3,4 @@ vendor/
 spark-operator
 .idea/
 **/*.iml
+sparkctl/sparkctl

Dockerfile

@@ -14,6 +14,6 @@
 # limitations under the License.
 #
-FROM liyinan926/spark-base:v2.3.0
+FROM liyinan926/spark:v2.3.0
 COPY spark-operator /usr/bin/
 ENTRYPOINT ["/usr/bin/spark-operator"]
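The Dockerfile now builds on the consolidated `liyinan926/spark:v2.3.0` image. The README's `make image-tag=<image tag> push` target is the documented build path; as a rough local equivalent, a plain `docker` sketch (assuming the `spark-operator` binary has already been compiled into the build context) might look like:

```bash
# Build the operator image from the repository root; the spark-operator binary
# must already exist in the build context for the COPY step to succeed.
docker build -t liyinan926/spark-operator:v2.3.0 .

# Confirm the entrypoint points at the binary copied into /usr/bin.
docker inspect --format '{{json .Config.Entrypoint}}' liyinan926/spark-operator:v2.3.0
```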

README.md

@@ -2,6 +2,16 @@
 **This is not an official Google product.**
 
+## Table of Contents
+1. [Project Status](#project-status)
+2. [Prerequisites](#prerequisites)
+3. [Spark Operator](#spark-operator)
+    1. [Build Spark Operator](#build-spark-operator)
+    2. [Deploying Spark Operator](#deploying-spark-operator)
+    3. [Configuring Spark Operator](#configuring-spark-operator)
+    4. [Running the Example Spark Application](#running-the-example-spark-application)
+    5. [Using the Initializer](#using-the-initializer)
 ## Project Status
 
 **Project status:** *alpha*
@@ -53,7 +63,7 @@ need additional command-line options to get passed in.
 Additionally, keeping the CRD implementation outside the Spark repository gives us a lot of flexibility in terms of
 functionality to add to the CRD controller. We also have full control over the code review and release process.
 
-### Build and Installation
+### Build Spark Operator
 
 To get Spark Operator, run the following commands:
@@ -101,7 +111,18 @@ make image-tag=<image tag> push
 To deploy the Spark Operator, run the following command:
 
 ```bash
-kubectl create -f manifest/spark-operator.yaml
+kubectl create -f manifest/
 ```
+
+This will create a namespace `sparkoperator`, set up RBAC for the Spark Operator to run in the namespace, and create a Deployment named
+`sparkoperator` in the namespace.
+
+Due to a [known issue](https://cloud.google.com/kubernetes-engine/docs/how-to/role-based-access-control#defining_permissions_in_a_role)
+in GKE, you will need to first grant yourself cluster-admin privileges before you can create custom roles and role bindings on a
+GKE cluster running version 1.6 or later.
+
+```bash
+$ kubectl create clusterrolebinding <user>-cluster-admin-binding --clusterrole=cluster-admin --user=<user>@<domain>
+```
 
 ### Configuring Spark Operator
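As a quick, hedged check of the deployment steps above (assuming `kubectl` is pointed at the target cluster and `kubectl create -f manifest/` has been run), the namespace, service account, and Deployment described in the README can be verified with:

```bash
# Verify the resources created by the manifests in manifest/.
kubectl get namespace sparkoperator
kubectl get serviceaccount sparkoperator --namespace sparkoperator
kubectl get deployment sparkoperator --namespace sparkoperator

# The operator pod should be Running once the image has been pulled.
kubectl get pods --namespace sparkoperator -l app=sparkoperator
```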

Example Spark application manifest

@@ -23,13 +23,13 @@ spec:
   type: Scala
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.11-2.3.0-SNAPSHOT.jar"
+  mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.11-2.4.0-SNAPSHOT.jar"
   driver:
-    image: "liyinan926/spark-driver:v2.3.0"
+    image: "liyinan926/spark:v2.3.0"
     cores: "0.1"
   executor:
-    image: "liyinan926/spark-executor:v2.3.0"
+    image: "liyinan926/spark:v2.3.0"
     instances: 1
     memory: "512m"
-  restartPolicy: OnFailure
+  restartPolicy: Never
   submissionByUser: false
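To try the updated example once the operator is running, a sketch along these lines may help; the manifest path `examples/spark-pi.yaml` is hypothetical here, so substitute wherever the example file actually lives:

```bash
# Submit the example application (path is illustrative; adjust to the real location).
kubectl create -f examples/spark-pi.yaml

# Watch the driver and executor pods; with restartPolicy: Never, the example
# application is not restarted automatically after it completes or fails.
kubectl get pods -w
```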

Spark Operator RBAC manifest

@@ -15,15 +15,38 @@
 # limitations under the License.
 #
+apiVersion: v1
+kind: Namespace
+metadata:
+  name: sparkoperator
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: sparkoperator
+  namespace: sparkoperator
+---
+apiVersion: rbac.authorization.k8s.io/v1beta1
+kind: ClusterRole
+metadata:
+  name: sparkoperator
+rules:
+- apiGroups:
+  - "*"
+  resources:
+  - "*"
+  verbs:
+  - "*"
+---
 apiVersion: rbac.authorization.k8s.io/v1beta1
 kind: ClusterRoleBinding
 metadata:
-  name: spark-operator-rbac
+  name: sparkoperator
 subjects:
 - kind: ServiceAccount
-  name: spark-operator
-  namespace: spark-operator
+  name: sparkoperator
+  namespace: sparkoperator
 roleRef:
   kind: ClusterRole
-  name: cluster-admin
+  name: sparkoperator
   apiGroup: rbac.authorization.k8s.io
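With the manifests above applied, `kubectl auth can-i` can impersonate the operator's service account to confirm the wildcard ClusterRole is actually bound; a small sketch:

```bash
# Probe a few representative permissions as the operator's service account.
# The wildcard rules in the ClusterRole above should make these return "yes".
kubectl auth can-i create pods --as=system:serviceaccount:sparkoperator:sparkoperator
kubectl auth can-i create services --as=system:serviceaccount:sparkoperator:sparkoperator --namespace default
kubectl auth can-i '*' '*' --as=system:serviceaccount:sparkoperator:sparkoperator
```

The blanket `"*"` rules are convenient while the project is in alpha; they could be narrowed to the specific API groups and verbs the controller needs once those settle.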

Spark Operator Deployment manifest

@@ -1,4 +1,3 @@
-
 #
 # Copyright 2017 Google LLC
 #
@@ -18,24 +17,25 @@
 apiVersion: apps/v1beta1
 kind: Deployment
 metadata:
-  name: spark-operator
-  namespace: default
+  name: sparkoperator
+  namespace: sparkoperator
   labels:
-    app: spark-operator
+    app: sparkoperator
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app: spark-operator
+      app: sparkoperator
   template:
     metadata:
       labels:
-        app: spark-operator
+        app: sparkoperator
       initializers:
         pending: []
     spec:
+      serviceAccountName: sparkoperator
       containers:
-      - name: spark-operator
+      - name: sparkoperator
         image: liyinan926/spark-operator:v2.3.0
         imagePullPolicy: Always
         command: ["/usr/bin/spark-operator"]
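Finally, a short sketch (assuming the manifests in `manifest/` have been applied) for confirming the operator Deployment defined above rolls out and for tailing its logs:

```bash
# Wait for the sparkoperator Deployment to finish rolling out, then tail the operator logs.
kubectl rollout status deployment/sparkoperator --namespace sparkoperator
kubectl logs --namespace sparkoperator -l app=sparkoperator --tail=50
```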