Format other frontend code using prettier (#2717)

Authored by Yuan (Bob) Gong on 2019-12-11 21:04:06 +08:00; committed by Kubernetes Prow Robot
parent 83fb13596b
commit 4500d244d7
21 changed files with 1233 additions and 1277 deletions

@ -1,2 +1 @@
src/generated
server

@ -25,7 +25,7 @@ const config = {
debugWindow: false,
engine: 'puppeteer',
engineOptions: {
args: ['--no-sandbox']
args: ['--no-sandbox'],
},
id: 'pipelines',
onReadyScript: 'steps.js',
@ -44,16 +44,12 @@ const config = {
},
{
label: 'hover on first row',
steps: [
{ action: 'hover', selector: '.tableRow' },
],
steps: [{ action: 'hover', selector: '.tableRow' }],
url,
},
{
label: 'select one row',
steps: [
{ action: 'click', selector: '.tableRow' }
],
steps: [{ action: 'click', selector: '.tableRow' }],
url,
},
{
@ -61,18 +57,15 @@ const config = {
steps: [
{ action: 'click', selector: '.tableRow' },
{ action: 'click', selector: `.tableRow:nth-of-type(2)` },
{ action: 'click', selector: `.tableRow:nth-of-type(5)` }
{ action: 'click', selector: `.tableRow:nth-of-type(5)` },
],
url,
},
{
label: 'open upload dialog',
steps: [
{ action: 'click', selector: '#uploadBtn' },
{ action: 'pause' }
],
steps: [{ action: 'click', selector: '#uploadBtn' }, { action: 'pause' }],
url,
}
},
],
viewports: [{ width: 1024, height: 768 }],
};

@ -24,8 +24,8 @@ export default {
creationTimestamp: '2018-06-06T00:04:49Z',
labels: {
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Succeeded'
}
'workflows.argoproj.io/phase': 'Succeeded',
},
},
spec: {
templates: [
@ -37,25 +37,21 @@ export default {
container: {
name: '',
image: 'docker/whalesay:latest',
command: [
'cowsay'
],
args: [
'{{workflow.parameters.message}}'
],
resources: {}
}
}
command: ['cowsay'],
args: ['{{workflow.parameters.message}}'],
resources: {},
},
},
],
entrypoint: 'whalesay1',
arguments: {
parameters: [
{
name: 'message',
value: 'hello world'
}
]
}
value: 'hello world',
},
],
},
},
status: {
phase: 'Succeeded',
@ -70,8 +66,8 @@ export default {
templateName: 'whalesay1',
phase: 'Succeeded',
startedAt: '2018-06-06T00:04:49Z',
finishedAt: '2018-06-06T00:05:23Z'
}
}
}
finishedAt: '2018-06-06T00:05:23Z',
},
},
},
};

@ -18,14 +18,15 @@ export default {
name: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c',
namespace: 'default',
// tslint:disable-next-line:max-line-length
selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/hello-world-61985dbf-4299-458b-a183-1f2c2436c21c',
selfLink:
'/apis/argoproj.io/v1alpha1/namespaces/default/workflows/hello-world-61985dbf-4299-458b-a183-1f2c2436c21c',
uid: 'ef2a4a61-6e84-11e8-bba7-42010a8a0fc2',
resourceVersion: '10690686',
creationTimestamp: '2018-06-12T21:09:46Z',
labels: {
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Succeeded'
}
'workflows.argoproj.io/phase': 'Succeeded',
},
},
spec: {
templates: [
@ -39,10 +40,10 @@ export default {
{
name: 'say',
template: 'say',
arguments: {}
}
]
]
arguments: {},
},
],
],
},
{
name: 'say',
@ -52,18 +53,14 @@ export default {
container: {
name: '',
image: 'docker/whalesay:latest',
command: [
'cowsay'
],
args: [
'hello world'
],
resources: {}
}
}
command: ['cowsay'],
args: ['hello world'],
resources: {},
},
},
],
entrypoint: 'whalesay',
arguments: {}
arguments: {},
},
status: {
phase: 'Succeeded',
@ -79,9 +76,7 @@ export default {
phase: 'Succeeded',
startedAt: '2018-06-12T21:09:46Z',
finishedAt: '2018-06-12T21:09:47Z',
children: [
'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156'
]
children: ['hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156'],
},
'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156': {
id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156',
@ -92,9 +87,7 @@ export default {
boundaryID: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c',
startedAt: '2018-06-12T21:09:46Z',
finishedAt: '2018-06-12T21:09:47Z',
children: [
'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705'
]
children: ['hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705'],
},
'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705': {
id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705',
@ -105,8 +98,8 @@ export default {
phase: 'Succeeded',
boundaryID: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c',
startedAt: '2018-06-12T21:09:46Z',
finishedAt: '2018-06-12T21:09:47Z'
}
}
}
finishedAt: '2018-06-12T21:09:47Z',
},
},
},
};

@ -17,7 +17,8 @@ export default {
metadata: {
name: 'job-cloneofhelloworldls94q-1-3667110102',
namespace: 'kubeflow',
selfLink: '/apis/argoproj.io/v1alpha1/namespaces/kubeflow/workflows/job-cloneofhelloworldls94q-1-3667110102',
selfLink:
'/apis/argoproj.io/v1alpha1/namespaces/kubeflow/workflows/job-cloneofhelloworldls94q-1-3667110102',
uid: '55dc2b6d-d688-11e8-83db-42010a800093',
resourceVersion: '128069',
creationTimestamp: '2018-10-23T05:56:07Z',
@ -27,7 +28,7 @@ export default {
'scheduledworkflows.kubeflow.org/workflowEpoch': '1540274157',
'scheduledworkflows.kubeflow.org/workflowIndex': '1',
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Succeeded'
'workflows.argoproj.io/phase': 'Succeeded',
},
ownerReferences: [
{
@ -36,9 +37,9 @@ export default {
name: 'job-cloneofhelloworldls94q',
uid: '4fac8e0f-d688-11e8-83db-42010a800093',
controller: true,
blockOwnerDeletion: true
}
]
blockOwnerDeletion: true,
},
],
},
spec: {
templates: [
@ -56,10 +57,10 @@ export default {
parameters: [
{
name: 'message',
value: '{{workflow.parameters.message}} from node: A'
}
]
}
value: '{{workflow.parameters.message}} from node: A',
},
],
},
},
{
name: 'B',
@ -68,13 +69,11 @@ export default {
parameters: [
{
name: 'message',
value: '{{workflow.parameters.message}} from node: B'
}
]
value: '{{workflow.parameters.message}} from node: B',
},
],
},
dependencies: [
'A'
]
dependencies: ['A'],
},
{
name: 'C',
@ -83,13 +82,11 @@ export default {
parameters: [
{
name: 'message',
value: '{{workflow.parameters.message}} from node: C'
}
]
value: '{{workflow.parameters.message}} from node: C',
},
],
},
dependencies: [
'A'
]
dependencies: ['A'],
},
{
name: 'D',
@ -98,49 +95,43 @@ export default {
parameters: [
{
name: 'message',
value: '{{workflow.parameters.message}} from node: D'
}
]
value: '{{workflow.parameters.message}} from node: D',
},
],
},
dependencies: [
'B',
'C'
]
}
]
}
dependencies: ['B', 'C'],
},
],
},
},
{
name: 'echo',
inputs: {
parameters: [
{
name: 'message'
}
]
name: 'message',
},
],
},
outputs: {},
metadata: {},
container: {
name: '',
image: 'alpine:3.7',
command: [
'echo',
'{{inputs.parameters.message}}'
],
resources: {}
}
}
command: ['echo', '{{inputs.parameters.message}}'],
resources: {},
},
},
],
entrypoint: 'diamond',
arguments: {
parameters: [
{
name: 'message',
value: 'hello world'
}
]
}
value: 'hello world',
},
],
},
},
status: {
phase: 'Succeeded',
@ -156,12 +147,8 @@ export default {
phase: 'Succeeded',
startedAt: '2018-10-23T05:56:07Z',
finishedAt: '2018-10-23T05:56:25Z',
children: [
'job-cloneofhelloworldls94q-1-3667110102-3867833025'
],
outboundNodes: [
'job-cloneofhelloworldls94q-1-3667110102-3918165882'
]
children: ['job-cloneofhelloworldls94q-1-3667110102-3867833025'],
outboundNodes: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'],
},
'job-cloneofhelloworldls94q-1-3667110102-3817500168': {
id: 'job-cloneofhelloworldls94q-1-3667110102-3817500168',
@ -177,13 +164,11 @@ export default {
parameters: [
{
name: 'message',
value: 'hello world from node: B'
}
]
value: 'hello world from node: B',
},
],
},
children: [
'job-cloneofhelloworldls94q-1-3667110102-3918165882'
]
children: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'],
},
'job-cloneofhelloworldls94q-1-3667110102-3834277787': {
id: 'job-cloneofhelloworldls94q-1-3667110102-3834277787',
@ -199,13 +184,11 @@ export default {
parameters: [
{
name: 'message',
value: 'hello world from node: C'
}
]
value: 'hello world from node: C',
},
],
},
children: [
'job-cloneofhelloworldls94q-1-3667110102-3918165882'
]
children: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'],
},
'job-cloneofhelloworldls94q-1-3667110102-3867833025': {
id: 'job-cloneofhelloworldls94q-1-3667110102-3867833025',
@ -221,14 +204,14 @@ export default {
parameters: [
{
name: 'message',
value: 'hello world from node: A'
}
]
value: 'hello world from node: A',
},
],
},
children: [
'job-cloneofhelloworldls94q-1-3667110102-3817500168',
'job-cloneofhelloworldls94q-1-3667110102-3834277787'
]
'job-cloneofhelloworldls94q-1-3667110102-3834277787',
],
},
'job-cloneofhelloworldls94q-1-3667110102-3918165882': {
id: 'job-cloneofhelloworldls94q-1-3667110102-3918165882',
@ -244,11 +227,11 @@ export default {
parameters: [
{
name: 'message',
value: 'hello world from node: D'
}
]
}
}
}
}
value: 'hello world from node: D',
},
],
},
},
},
},
};

@ -24,8 +24,8 @@ export default {
creationTimestamp: '2018-06-06T00:04:49Z',
labels: {
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Succeeded'
}
'workflows.argoproj.io/phase': 'Succeeded',
},
},
spec: {
templates: [
@ -37,25 +37,21 @@ export default {
container: {
name: '',
image: 'docker/whalesay:latest',
command: [
'cowsay'
],
args: [
'{{workflow.parameters.message}}'
],
resources: {}
}
}
command: ['cowsay'],
args: ['{{workflow.parameters.message}}'],
resources: {},
},
},
],
entrypoint: 'whalesay1',
arguments: {
parameters: [
{
name: 'message',
value: 'hello world'
}
]
}
value: 'hello world',
},
],
},
},
status: {
phase: 'Succeeded',
@ -76,24 +72,20 @@ export default {
{
name: 'JSON Data',
value: JSON.stringify({
'string1': 'a',
'string2': 'b',
'number1': 1,
'number2': 2.2,
'object': {
'string': 'a',
'number': 2
string1: 'a',
string2: 'b',
number1: 1,
number2: 2.2,
object: {
string: 'a',
number: 2,
},
'array': [
'a',
'b',
'c'
]
})
}
]
}
}
}
}
array: ['a', 'b', 'c'],
}),
},
],
},
},
},
},
};

@ -53,7 +53,6 @@ interface BaseResource {
// tslint:disable-next-line:no-default-export
export default (app: express.Application) => {
app.use((req, _, next) => {
// tslint:disable-next-line:no-console
console.info(req.method + ' ' + req.originalUrl);
@ -71,7 +70,7 @@ export default (app: express.Application) => {
apiServerCommitHash: 'd3c4add0a95e930c70a330466d0923827784eb9a',
apiServerReady: true,
buildDate: 'Wed Jan 9 19:40:24 UTC 2019',
frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb'
frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb',
});
});
@ -79,7 +78,10 @@ export default (app: express.Application) => {
res.sendStatus(200);
});
function getSortKeyAndOrder(defaultSortKey: string, queryParam?: string): { desc: boolean, key: string } {
function getSortKeyAndOrder(
defaultSortKey: string,
queryParam?: string,
): { desc: boolean; key: string } {
let key = defaultSortKey;
let desc = false;
@ -88,8 +90,10 @@ export default (app: express.Application) => {
key = keyParts[0];
// Check that the key is properly formatted.
if (keyParts.length > 2 ||
(keyParts.length === 2 && keyParts[1] !== 'asc' && keyParts[1] !== 'desc')) {
if (
keyParts.length > 2 ||
(keyParts.length === 2 && keyParts[1] !== 'asc' && keyParts[1] !== 'desc')
) {
throw new Error(`Invalid sort string: ${queryParam}`);
}
@ -124,7 +128,7 @@ export default (app: express.Application) => {
return result * (desc ? -1 : 1);
});
const start = (req.query.page_token ? +req.query.page_token : 0);
const start = req.query.page_token ? +req.query.page_token : 0;
const end = start + (+req.query.page_size || 20);
response.jobs = jobs.slice(start, end);
@ -161,7 +165,7 @@ export default (app: express.Application) => {
return result * (desc ? -1 : 1);
});
const start = (req.query.pageToken ? +req.query.pageToken : 0);
const start = req.query.pageToken ? +req.query.pageToken : 0;
const end = start + (+req.query.pageSize || 20);
response.experiments = experiments.slice(start, end);
@ -186,10 +190,9 @@ export default (app: express.Application) => {
}, 1000);
});
app.get(v1beta1Prefix + '/experiments/:eid', (req, res) => {
res.header('Content-Type', 'application/json');
const experiment = fixedData.experiments.find((exp) => exp.id === req.params.eid);
const experiment = fixedData.experiments.find(exp => exp.id === req.params.eid);
if (!experiment) {
res.status(404).send(`No experiment was found with ID: ${req.params.eid}`);
return;
@ -227,7 +230,7 @@ export default (app: express.Application) => {
res.header('Content-Type', 'application/json');
switch (req.method) {
case 'DELETE':
const i = fixedData.jobs.findIndex((j) => j.id === req.params.jid);
const i = fixedData.jobs.findIndex(j => j.id === req.params.jid);
if (fixedData.jobs[i].name!.startsWith('Cannot be deleted')) {
res.status(502).send(`Deletion failed for job: '${fixedData.jobs[i].name}'`);
} else {
@ -237,7 +240,7 @@ export default (app: express.Application) => {
}
break;
case 'GET':
const job = fixedData.jobs.find((j) => j.id === req.params.jid);
const job = fixedData.jobs.find(j => j.id === req.params.jid);
if (job) {
res.json(job);
} else {
@ -257,15 +260,20 @@ export default (app: express.Application) => {
runs: [],
};
let runs: ApiRun[] = fixedData.runs.map((r) => r.run!);
let runs: ApiRun[] = fixedData.runs.map(r => r.run!);
if (req.query.filter) {
runs = filterResources(runs, req.query.filter);
}
if (req.query['resource_reference_key.type'] === ApiResourceType.EXPERIMENT) {
runs = runs.filter((r) => RunUtils.getAllExperimentReferences(r)
.some((ref) => ref.key && ref.key.id && ref.key.id === req.query['resource_reference_key.id'] || false));
runs = runs.filter(r =>
RunUtils.getAllExperimentReferences(r).some(
ref =>
(ref.key && ref.key.id && ref.key.id === req.query['resource_reference_key.id']) ||
false,
),
);
}
const { desc, key } = getSortKeyAndOrder(RunSortKeys.CREATED_AT, req.query.sort_by);
@ -281,7 +289,7 @@ export default (app: express.Application) => {
return result * (desc ? -1 : 1);
});
const start = (req.query.page_token ? +req.query.page_token : 0);
const start = req.query.page_token ? +req.query.page_token : 0;
const end = start + (+req.query.page_size || 20);
response.runs = runs.slice(start, end);
@ -294,7 +302,7 @@ export default (app: express.Application) => {
app.get(v1beta1Prefix + '/runs/:rid', (req, res) => {
const rid = req.params.rid;
const run = fixedData.runs.find((r) => r.run!.id === rid);
const run = fixedData.runs.find(r => r.run!.id === rid);
if (!run) {
res.status(404).send('Cannot find a run with id: ' + rid);
return;
@ -327,8 +335,8 @@ export default (app: express.Application) => {
}
const runDetail = fixedData.runs.find(r => r.run!.id === req.params.rid);
if (runDetail) {
runDetail.run!.storage_state = req.params.method === 'archive' ?
RunStorageState.ARCHIVED : RunStorageState.AVAILABLE;
runDetail.run!.storage_state =
req.params.method === 'archive' ? RunStorageState.ARCHIVED : RunStorageState.AVAILABLE;
res.json({});
} else {
res.status(500).send('Cannot find a run with id ' + req.params.rid);
@ -337,7 +345,7 @@ export default (app: express.Application) => {
app.post(v1beta1Prefix + '/jobs/:jid/enable', (req, res) => {
setTimeout(() => {
const job = fixedData.jobs.find((j) => j.id === req.params.jid);
const job = fixedData.jobs.find(j => j.id === req.params.jid);
if (job) {
job.enabled = true;
res.json({});
@ -349,7 +357,7 @@ export default (app: express.Application) => {
app.post(v1beta1Prefix + '/jobs/:jid/disable', (req, res) => {
setTimeout(() => {
const job = fixedData.jobs.find((j) => j.id === req.params.jid);
const job = fixedData.jobs.find(j => j.id === req.params.jid);
if (job) {
job.enabled = false;
res.json({});
@ -369,15 +377,22 @@ export default (app: express.Application) => {
switch (p.op) {
case PredicateOp.EQUALS:
if (p.key === 'name') {
return r.name && r.name.toLocaleLowerCase() === (p.string_value || '').toLocaleLowerCase();
return (
r.name && r.name.toLocaleLowerCase() === (p.string_value || '').toLocaleLowerCase()
);
} else if (p.key === 'storage_state') {
return (r as ApiRun).storage_state && (r as ApiRun).storage_state!.toString() === p.string_value;
return (
(r as ApiRun).storage_state &&
(r as ApiRun).storage_state!.toString() === p.string_value
);
} else {
throw new Error(`Key: ${p.key} is not yet supported by the mock API server`);
}
case PredicateOp.NOTEQUALS:
if (p.key === 'name') {
return r.name && r.name.toLocaleLowerCase() !== (p.string_value || '').toLocaleLowerCase();
return (
r.name && r.name.toLocaleLowerCase() !== (p.string_value || '').toLocaleLowerCase()
);
} else if (p.key === 'storage_state') {
return ((r as ApiRun).storage_state || {}).toString() !== p.string_value;
} else {
@ -387,7 +402,10 @@ export default (app: express.Application) => {
if (p.key !== 'name') {
throw new Error(`Key: ${p.key} is not yet supported by the mock API server`);
}
return r.name && r.name.toLocaleLowerCase().includes((p.string_value || '').toLocaleLowerCase());
return (
r.name &&
r.name.toLocaleLowerCase().includes((p.string_value || '').toLocaleLowerCase())
);
case PredicateOp.NOTEQUALS:
// Fall through
case PredicateOp.GREATERTHAN:
@ -432,7 +450,7 @@ export default (app: express.Application) => {
return result * (desc ? -1 : 1);
});
const start = (req.query.page_token ? +req.query.page_token : 0);
const start = req.query.page_token ? +req.query.page_token : 0;
const end = start + (+req.query.page_size || 20);
response.pipelines = pipelines.slice(start, end);
@ -445,7 +463,7 @@ export default (app: express.Application) => {
app.delete(v1beta1Prefix + '/pipelines/:pid', (req, res) => {
res.header('Content-Type', 'application/json');
const i = fixedData.pipelines.findIndex((p) => p.id === req.params.pid);
const i = fixedData.pipelines.findIndex(p => p.id === req.params.pid);
if (i === -1) {
res.status(404).send(`No pipelines was found with ID: ${req.params.pid}`);
@ -463,7 +481,7 @@ export default (app: express.Application) => {
app.get(v1beta1Prefix + '/pipelines/:pid', (req, res) => {
res.header('Content-Type', 'application/json');
const pipeline = fixedData.pipelines.find((p) => p.id === req.params.pid);
const pipeline = fixedData.pipelines.find(p => p.id === req.params.pid);
if (!pipeline) {
res.status(404).send(`No pipeline was found with ID: ${req.params.pid}`);
return;
@ -473,7 +491,7 @@ export default (app: express.Application) => {
app.get(v1beta1Prefix + '/pipelines/:pid/templates', (req, res) => {
res.header('Content-Type', 'text/x-yaml');
const pipeline = fixedData.pipelines.find((p) => p.id === req.params.pid);
const pipeline = fixedData.pipelines.find(p => p.id === req.params.pid);
if (!pipeline) {
res.status(404).send(`No pipeline was found with ID: ${req.params.pid}`);
return;
@ -492,9 +510,10 @@ export default (app: express.Application) => {
function mockCreatePipeline(res: Response, name: string, body?: any): void {
res.header('Content-Type', 'application/json');
// Don't allow uploading multiple pipelines with the same name
if (fixedData.pipelines.find((p) => p.name === name)) {
res.status(502).send(
`A Pipeline named: "${name}" already exists. Please choose a different name.`);
if (fixedData.pipelines.find(p => p.name === name)) {
res
.status(502)
.send(`A Pipeline named: "${name}" already exists. Please choose a different name.`);
} else {
const pipeline = body || {};
pipeline.id = 'new-pipeline-' + (fixedData.pipelines.length + 1);
@ -504,13 +523,13 @@ export default (app: express.Application) => {
'TODO: the mock middleware does not actually use the uploaded pipeline';
pipeline.parameters = [
{
name: 'output'
name: 'output',
},
{
name: 'param-1'
name: 'param-1',
},
{
name: 'param-2'
name: 'param-2',
},
];
fixedData.pipelines.push(pipeline);

@ -24,8 +24,8 @@ export default {
creationTimestamp: '2018-04-17T20:58:23Z',
labels: {
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Succeeded'
}
'workflows.argoproj.io/phase': 'Succeeded',
},
},
spec: {
templates: [
@ -39,24 +39,24 @@ export default {
{
name: 'flip-coin',
template: 'flip-coin',
arguments: {}
}
arguments: {},
},
],
[
{
name: 'heads',
template: 'heads',
arguments: {},
when: '{{steps.flip-coin.outputs.result}} == heads'
when: '{{steps.flip-coin.outputs.result}} == heads',
},
{
name: 'tails',
template: 'coinflip',
arguments: {},
when: '{{steps.flip-coin.outputs.result}} == tails'
}
]
]
when: '{{steps.flip-coin.outputs.result}} == tails',
},
],
],
},
{
name: 'flip-coin',
@ -66,13 +66,12 @@ export default {
script: {
name: '',
image: 'python:alpine3.6',
command: [
'python'
],
command: ['python'],
resources: {},
// tslint:disable-next-line:max-line-length
source: 'import random\nresult = "heads" if random.randint(0,1) == 0 else "tails"\nprint(result)\n'
}
source:
'import random\nresult = "heads" if random.randint(0,1) == 0 else "tails"\nprint(result)\n',
},
},
{
name: 'heads',
@ -82,30 +81,25 @@ export default {
container: {
name: '',
image: 'alpine:3.6',
command: [
'sh',
'-c'
],
args: [
'echo "it was heads"'
],
resources: {}
}
}
command: ['sh', '-c'],
args: ['echo "it was heads"'],
resources: {},
},
},
],
entrypoint: 'coinflip',
arguments: {
parameters: [
{
name: 'x',
value: 10
value: 10,
},
{
name: 'y',
value: 20
}
]
}
value: 20,
},
],
},
},
status: {
phase: 'Succeeded',
@ -121,78 +115,74 @@ export default {
phase: 'Succeeded',
startedAt: '2018-04-17T20:58:23Z',
finishedAt: '2018-04-17T20:58:38Z',
children: [
'coinflip-recursive-q7dqb-1787723858',
'coinflip-recursive-q7dqb-1720466287'
],
outboundNodes: [
'coinflip-recursive-q7dqb-3721646052'
]
children: ['coinflip-recursive-q7dqb-1787723858', 'coinflip-recursive-q7dqb-1720466287'],
outboundNodes: ['coinflip-recursive-q7dqb-3721646052'],
},
'coinflip-recursive-q7dqb-1720466287': {
id: 'coinflip-recursive-q7dqb-1720466287',
name: 'coinflip-recursive-q7dqb[1]',
displayName: '[1]',
outputs: {
artifacts: [{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'staging',
artifacts: [
{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'staging',
},
},
}],
],
},
type: 'StepGroup',
phase: 'Succeeded',
boundaryID: 'coinflip-recursive-q7dqb',
startedAt: '2018-04-17T20:58:28Z',
finishedAt: '2018-04-17T20:58:38Z',
children: [
'coinflip-recursive-q7dqb-4011569486',
'coinflip-recursive-q7dqb-3266226990'
]
children: ['coinflip-recursive-q7dqb-4011569486', 'coinflip-recursive-q7dqb-3266226990'],
},
'coinflip-recursive-q7dqb-1787723858': {
id: 'coinflip-recursive-q7dqb-1787723858',
name: 'coinflip-recursive-q7dqb[0]',
displayName: '[0]',
outputs: {
artifacts: [{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'analysis2',
artifacts: [
{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'analysis2',
},
},
}],
],
},
type: 'StepGroup',
phase: 'Succeeded',
boundaryID: 'coinflip-recursive-q7dqb',
startedAt: '2018-04-17T20:58:23Z',
finishedAt: '2018-04-17T20:58:28Z',
children: [
'coinflip-recursive-q7dqb-311338607'
]
children: ['coinflip-recursive-q7dqb-311338607'],
},
'coinflip-recursive-q7dqb-2934726852': {
id: 'coinflip-recursive-q7dqb-2934726852',
name: 'coinflip-recursive-q7dqb[1].tails[1].tails',
displayName: 'tails',
outputs: {
artifacts: [{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'transform',
artifacts: [
{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'transform',
},
},
}],
],
},
type: 'Skipped',
phase: 'Skipped',
boundaryID: 'coinflip-recursive-q7dqb-3266226990',
message: 'when \'heads == tails\' evaluated false',
message: "when 'heads == tails' evaluated false",
startedAt: '2018-04-17T20:58:34Z',
finishedAt: '2018-04-17T20:58:34Z'
finishedAt: '2018-04-17T20:58:34Z',
},
'coinflip-recursive-q7dqb-311338607': {
id: 'coinflip-recursive-q7dqb-311338607',
@ -205,23 +195,23 @@ export default {
startedAt: '2018-04-17T20:58:23Z',
finishedAt: '2018-04-17T20:58:28Z',
outputs: {
artifacts: [{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'model2',
artifacts: [
{
name: 'mlpipeline-ui-metadata',
s3: {
bucket: 'somebucket',
key: 'model2',
},
},
}],
],
parameters: [
{
name: 'result',
value: 'tails'
}
]
value: 'tails',
},
],
},
children: [
'coinflip-recursive-q7dqb-1720466287'
]
children: ['coinflip-recursive-q7dqb-1720466287'],
},
'coinflip-recursive-q7dqb-3266226990': {
id: 'coinflip-recursive-q7dqb-3266226990',
@ -233,13 +223,8 @@ export default {
boundaryID: 'coinflip-recursive-q7dqb',
startedAt: '2018-04-17T20:58:28Z',
finishedAt: '2018-04-17T20:58:38Z',
children: [
'coinflip-recursive-q7dqb-4010083248',
'coinflip-recursive-q7dqb-855846949'
],
outboundNodes: [
'coinflip-recursive-q7dqb-3721646052'
]
children: ['coinflip-recursive-q7dqb-4010083248', 'coinflip-recursive-q7dqb-855846949'],
outboundNodes: ['coinflip-recursive-q7dqb-3721646052'],
},
'coinflip-recursive-q7dqb-3466727817': {
id: 'coinflip-recursive-q7dqb-3466727817',
@ -255,13 +240,11 @@ export default {
parameters: [
{
name: 'result',
value: 'heads'
}
]
value: 'heads',
},
],
},
children: [
'coinflip-recursive-q7dqb-855846949'
]
children: ['coinflip-recursive-q7dqb-855846949'],
},
'coinflip-recursive-q7dqb-3721646052': {
id: 'coinflip-recursive-q7dqb-3721646052',
@ -272,7 +255,7 @@ export default {
phase: 'Succeeded',
boundaryID: 'coinflip-recursive-q7dqb-3266226990',
startedAt: '2018-04-17T20:58:34Z',
finishedAt: '2018-04-17T20:58:37Z'
finishedAt: '2018-04-17T20:58:37Z',
},
'coinflip-recursive-q7dqb-4010083248': {
id: 'coinflip-recursive-q7dqb-4010083248',
@ -283,9 +266,7 @@ export default {
boundaryID: 'coinflip-recursive-q7dqb-3266226990',
startedAt: '2018-04-17T20:58:28Z',
finishedAt: '2018-04-17T20:58:34Z',
children: [
'coinflip-recursive-q7dqb-3466727817'
]
children: ['coinflip-recursive-q7dqb-3466727817'],
},
'coinflip-recursive-q7dqb-4011569486': {
id: 'coinflip-recursive-q7dqb-4011569486',
@ -294,9 +275,9 @@ export default {
type: 'Skipped',
phase: 'Skipped',
boundaryID: 'coinflip-recursive-q7dqb',
message: 'when \'tails == heads\' evaluated false',
message: "when 'tails == heads' evaluated false",
startedAt: '2018-04-17T20:58:28Z',
finishedAt: '2018-04-17T20:58:28Z'
finishedAt: '2018-04-17T20:58:28Z',
},
'coinflip-recursive-q7dqb-855846949': {
id: 'coinflip-recursive-q7dqb-855846949',
@ -307,11 +288,8 @@ export default {
boundaryID: 'coinflip-recursive-q7dqb-3266226990',
startedAt: '2018-04-17T20:58:34Z',
finishedAt: '2018-04-17T20:58:38Z',
children: [
'coinflip-recursive-q7dqb-3721646052',
'coinflip-recursive-q7dqb-2934726852'
]
}
}
}
children: ['coinflip-recursive-q7dqb-3721646052', 'coinflip-recursive-q7dqb-2934726852'],
},
},
},
};

@ -18,14 +18,15 @@ export default {
name: 'coinflip-error-nklng2',
namespace: 'default',
// tslint:disable-next-line:max-line-length
selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/coinflip-heads-c085010d-771a-4cdf-979c-257e991501b5',
selfLink:
'/apis/argoproj.io/v1alpha1/namespaces/default/workflows/coinflip-heads-c085010d-771a-4cdf-979c-257e991501b5',
uid: '47a3d09c-7db4-4788-ac55-3f8d908574aa',
resourceVersion: '10527150',
creationTimestamp: '2018-06-11T22:49:26Z',
labels: {
'workflows.argoproj.io/completed': 'true',
'workflows.argoproj.io/phase': 'Failed'
}
'workflows.argoproj.io/phase': 'Failed',
},
},
spec: {
templates: [
@ -40,10 +41,10 @@ export default {
name: 'heads',
template: 'heads',
arguments: {},
when: '{{steps.flip-coin.outputs.result}} == heads'
}
]
]
when: '{{steps.flip-coin.outputs.result}} == heads',
},
],
],
},
{
name: 'heads',
@ -53,25 +54,21 @@ export default {
container: {
name: '',
image: 'alpine:3.6',
command: [
'sh',
'-c'
],
args: [
'echo "it was heads"'
],
resources: {}
}
}
command: ['sh', '-c'],
args: ['echo "it was heads"'],
resources: {},
},
},
],
entrypoint: 'coinflip',
arguments: {}
arguments: {},
},
status: {
phase: 'Failed',
startedAt: '2018-06-11T22:49:26Z',
finishedAt: '2018-06-11T22:49:26Z',
// tslint:disable-next-line:max-line-length
message: 'invalid spec: templates.coinflip.steps[0].heads failed to resolve {{steps.flip-coin.outputs.result}}'
}
message:
'invalid spec: templates.coinflip.steps[0].heads failed to resolve {{steps.flip-coin.outputs.result}}',
},
};

File diff suppressed because it is too large.

@ -17,7 +17,8 @@ export default {
metadata: {
name: 'job-xgboosttrainingm7t2r-1-2537408167',
namespace: 'default',
selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/job-xgboosttrainingm7t2r-1-2537408167',
selfLink:
'/apis/argoproj.io/v1alpha1/namespaces/default/workflows/job-xgboosttrainingm7t2r-1-2537408167',
uid: '3333210c-cdef-11e8-8c17-42010a8a0078',
resourceVersion: '24210',
creationTimestamp: '2018-10-12T07:19:47Z',
@ -26,7 +27,7 @@ export default {
'scheduledworkflows.kubeflow.org/scheduledWorkflowName': 'job-xgboosttrainingm7t2r',
'scheduledworkflows.kubeflow.org/workflowEpoch': '1539328777',
'scheduledworkflows.kubeflow.org/workflowIndex': '1',
'workflows.argoproj.io/phase': 'Running'
'workflows.argoproj.io/phase': 'Running',
},
ownerReferences: [
{
@ -35,9 +36,9 @@ export default {
name: 'job-xgboosttrainingm7t2r',
uid: '2d3b0ed1-cdef-11e8-8c17-42010a8a0078',
controller: true,
blockOwnerDeletion: true
}
]
blockOwnerDeletion: true,
},
],
},
spec: {
templates: [
@ -46,25 +47,25 @@ export default {
inputs: {
parameters: [
{
name: 'create-cluster-output'
name: 'create-cluster-output',
},
{
name: 'output'
name: 'output',
},
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {
parameters: [
{
name: 'analyze-output',
valueFrom: {
path: '/output.txt'
}
}
]
path: '/output.txt',
},
},
],
},
metadata: {},
container: {
@ -82,22 +83,22 @@ export default {
'--train',
'gs://ml-pipeline-playground/sfpd/train.csv',
'--output',
'{{inputs.parameters.output}}/{{workflow.name}}/analysis'
'{{inputs.parameters.output}}/{{workflow.name}}/analysis',
],
resources: {}
}
resources: {},
},
},
{
name: 'confusion-matrix',
inputs: {
parameters: [
{
name: 'output'
name: 'output',
},
{
name: 'predict-output'
}
]
name: 'predict-output',
},
],
},
outputs: {},
metadata: {},
@ -108,32 +109,32 @@ export default {
'--output',
'{{inputs.parameters.output}}/{{workflow.name}}/confusionmatrix',
'--predictions',
'{{inputs.parameters.predict-output}}'
'{{inputs.parameters.predict-output}}',
],
resources: {}
}
resources: {},
},
},
{
name: 'create-cluster',
inputs: {
parameters: [
{
name: 'output'
name: 'output',
},
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {
parameters: [
{
name: 'create-cluster-output',
valueFrom: {
path: '/output.txt'
}
}
]
path: '/output.txt',
},
},
],
},
metadata: {},
container: {
@ -147,19 +148,19 @@ export default {
'--name',
'xgb-{{workflow.name}}',
'--staging',
'{{inputs.parameters.output}}'
'{{inputs.parameters.output}}',
],
resources: {}
}
resources: {},
},
},
{
name: 'delete-cluster',
inputs: {
parameters: [
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {},
metadata: {},
@ -172,22 +173,22 @@ export default {
'--region',
'us-central1',
'--name',
'xgb-{{workflow.name}}'
'xgb-{{workflow.name}}',
],
resources: {}
}
resources: {},
},
},
{
name: 'exit-handler-1',
inputs: {
parameters: [
{
name: 'output'
name: 'output',
},
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {},
metadata: {},
@ -200,21 +201,19 @@ export default {
parameters: [
{
name: 'create-cluster-output',
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}'
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}',
},
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
}
]
value: '{{inputs.parameters.project}}',
},
],
},
dependencies: [
'create-cluster'
]
dependencies: ['create-cluster'],
},
{
name: 'confusion-matrix',
@ -223,17 +222,15 @@ export default {
parameters: [
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'predict-output',
value: '{{tasks.predict.outputs.parameters.predict-output}}'
}
]
value: '{{tasks.predict.outputs.parameters.predict-output}}',
},
],
},
dependencies: [
'predict'
]
dependencies: ['predict'],
},
{
name: 'create-cluster',
@ -242,14 +239,14 @@ export default {
parameters: [
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
}
]
}
value: '{{inputs.parameters.project}}',
},
],
},
},
{
name: 'predict',
@ -258,36 +255,31 @@ export default {
parameters: [
{
name: 'analyze-output',
value: '{{tasks.analyze.outputs.parameters.analyze-output}}'
value: '{{tasks.analyze.outputs.parameters.analyze-output}}',
},
{
name: 'create-cluster-output',
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}'
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}',
},
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
value: '{{inputs.parameters.project}}',
},
{
name: 'train-output',
value: '{{tasks.train.outputs.parameters.train-output}}'
value: '{{tasks.train.outputs.parameters.train-output}}',
},
{
name: 'transform-eval',
value: '{{tasks.transform.outputs.parameters.transform-eval}}'
}
]
value: '{{tasks.transform.outputs.parameters.transform-eval}}',
},
],
},
dependencies: [
'analyze',
'create-cluster',
'train',
'transform'
]
dependencies: ['analyze', 'create-cluster', 'train', 'transform'],
},
{
name: 'roc',
@ -296,17 +288,15 @@ export default {
parameters: [
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'predict-output',
value: '{{tasks.predict.outputs.parameters.predict-output}}'
}
]
value: '{{tasks.predict.outputs.parameters.predict-output}}',
},
],
},
dependencies: [
'predict'
]
dependencies: ['predict'],
},
{
name: 'train',
@ -315,35 +305,31 @@ export default {
parameters: [
{
name: 'analyze-output',
value: '{{tasks.analyze.outputs.parameters.analyze-output}}'
value: '{{tasks.analyze.outputs.parameters.analyze-output}}',
},
{
name: 'create-cluster-output',
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}'
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}',
},
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
value: '{{inputs.parameters.project}}',
},
{
name: 'transform-eval',
value: '{{tasks.transform.outputs.parameters.transform-eval}}'
value: '{{tasks.transform.outputs.parameters.transform-eval}}',
},
{
name: 'transform-train',
value: '{{tasks.transform.outputs.parameters.transform-train}}'
}
]
value: '{{tasks.transform.outputs.parameters.transform-train}}',
},
],
},
dependencies: [
'analyze',
'create-cluster',
'transform'
]
dependencies: ['analyze', 'create-cluster', 'transform'],
},
{
name: 'transform',
@ -352,63 +338,60 @@ export default {
parameters: [
{
name: 'analyze-output',
value: '{{tasks.analyze.outputs.parameters.analyze-output}}'
value: '{{tasks.analyze.outputs.parameters.analyze-output}}',
},
{
name: 'create-cluster-output',
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}'
value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}',
},
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
}
]
value: '{{inputs.parameters.project}}',
},
],
},
dependencies: [
'analyze',
'create-cluster'
]
}
]
}
dependencies: ['analyze', 'create-cluster'],
},
],
},
},
{
name: 'predict',
inputs: {
parameters: [
{
name: 'analyze-output'
name: 'analyze-output',
},
{
name: 'create-cluster-output'
name: 'create-cluster-output',
},
{
name: 'output'
name: 'output',
},
{
name: 'project'
name: 'project',
},
{
name: 'train-output'
name: 'train-output',
},
{
name: 'transform-eval'
}
]
name: 'transform-eval',
},
],
},
outputs: {
parameters: [
{
name: 'predict-output',
valueFrom: {
path: '/output.txt'
}
}
]
path: '/output.txt',
},
},
],
},
metadata: {},
container: {
@ -432,22 +415,22 @@ export default {
'--model',
'{{inputs.parameters.train-output}}',
'--output',
'{{inputs.parameters.output}}/{{workflow.name}}/predict'
'{{inputs.parameters.output}}/{{workflow.name}}/predict',
],
resources: {}
}
resources: {},
},
},
{
name: 'roc',
inputs: {
parameters: [
{
name: 'output'
name: 'output',
},
{
name: 'predict-output'
}
]
name: 'predict-output',
},
],
},
outputs: {},
metadata: {},
@ -460,44 +443,44 @@ export default {
'--predictions',
'{{inputs.parameters.predict-output}}',
'--trueclass',
'ACTION'
'ACTION',
],
resources: {}
}
resources: {},
},
},
{
name: 'train',
inputs: {
parameters: [
{
name: 'analyze-output'
name: 'analyze-output',
},
{
name: 'create-cluster-output'
name: 'create-cluster-output',
},
{
name: 'output'
name: 'output',
},
{
name: 'project'
name: 'project',
},
{
name: 'transform-eval'
name: 'transform-eval',
},
{
name: 'transform-train'
}
]
name: 'transform-train',
},
],
},
outputs: {
parameters: [
{
name: 'train-output',
valueFrom: {
path: '/output.txt'
}
}
]
path: '/output.txt',
},
},
],
},
metadata: {},
container: {
@ -527,44 +510,44 @@ export default {
'--conf',
'gs://ml-pipeline-playground/trainconfcla.json',
'--output',
'{{inputs.parameters.output}}/{{workflow.name}}/model'
'{{inputs.parameters.output}}/{{workflow.name}}/model',
],
resources: {}
}
resources: {},
},
},
{
name: 'transform',
inputs: {
parameters: [
{
name: 'analyze-output'
name: 'analyze-output',
},
{
name: 'create-cluster-output'
name: 'create-cluster-output',
},
{
name: 'output'
name: 'output',
},
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {
parameters: [
{
name: 'transform-eval',
valueFrom: {
path: '/output_eval.txt'
}
path: '/output_eval.txt',
},
},
{
name: 'transform-train',
valueFrom: {
path: '/output_train.txt'
}
}
]
path: '/output_train.txt',
},
},
],
},
metadata: {},
container: {
@ -586,22 +569,22 @@ export default {
'--target',
'resolution',
'--output',
'{{inputs.parameters.output}}/{{workflow.name}}/transform'
'{{inputs.parameters.output}}/{{workflow.name}}/transform',
],
resources: {}
}
resources: {},
},
},
{
name: 'xgboosttrainer',
inputs: {
parameters: [
{
name: 'output'
name: 'output',
},
{
name: 'project'
}
]
name: 'project',
},
],
},
outputs: {},
metadata: {},
@ -614,65 +597,65 @@ export default {
parameters: [
{
name: 'output',
value: '{{inputs.parameters.output}}'
value: '{{inputs.parameters.output}}',
},
{
name: 'project',
value: '{{inputs.parameters.project}}'
}
]
}
}
]
}
}
value: '{{inputs.parameters.project}}',
},
],
},
},
],
},
},
],
entrypoint: 'xgboosttrainer',
arguments: {
parameters: [
{
name: 'output',
value: 'gs://yelsayed-2/xgboost'
value: 'gs://yelsayed-2/xgboost',
},
{
name: 'project',
value: 'yelsayed-2'
value: 'yelsayed-2',
},
{
name: 'region',
value: 'us-central1'
value: 'us-central1',
},
{
name: 'train-data',
value: 'gs://ml-pipeline-playground/sfpd/train.csv'
value: 'gs://ml-pipeline-playground/sfpd/train.csv',
},
{
name: 'eval-data',
value: 'gs://ml-pipeline-playground/sfpd/eval.csv'
value: 'gs://ml-pipeline-playground/sfpd/eval.csv',
},
{
name: 'schema',
value: 'gs://ml-pipeline-playground/sfpd/schema.json'
value: 'gs://ml-pipeline-playground/sfpd/schema.json',
},
{
name: 'target',
value: 'resolution'
value: 'resolution',
},
{
name: 'rounds',
value: '200'
value: '200',
},
{
name: 'workers',
value: '2'
value: '2',
},
{
name: 'true-label',
value: 'ACTION'
}
]
value: 'ACTION',
},
],
},
onExit: 'delete-cluster'
onExit: 'delete-cluster',
},
status: {
phase: 'Running',
@ -692,17 +675,15 @@ export default {
parameters: [
{
name: 'output',
value: 'gs://yelsayed-2/xgboost'
value: 'gs://yelsayed-2/xgboost',
},
{
name: 'project',
value: 'yelsayed-2'
}
]
value: 'yelsayed-2',
},
],
},
children: [
'job-xgboosttrainingm7t2r-1-2537408167-3348277322'
]
children: ['job-xgboosttrainingm7t2r-1-2537408167-3348277322'],
},
'job-xgboosttrainingm7t2r-1-2537408167-294182655': {
id: 'job-xgboosttrainingm7t2r-1-2537408167-294182655',
@ -712,21 +693,22 @@ export default {
templateName: 'create-cluster',
phase: 'Pending',
boundaryID: 'job-xgboosttrainingm7t2r-1-2537408167-3348277322',
message: 'ImagePullBackOff: Back-off pulling image "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"',
message:
'ImagePullBackOff: Back-off pulling image "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"',
startedAt: '2018-10-12T07:19:47Z',
finishedAt: null,
inputs: {
parameters: [
{
name: 'output',
value: 'gs://yelsayed-2/xgboost'
value: 'gs://yelsayed-2/xgboost',
},
{
name: 'project',
value: 'yelsayed-2'
}
]
}
value: 'yelsayed-2',
},
],
},
},
'job-xgboosttrainingm7t2r-1-2537408167-3348277322': {
id: 'job-xgboosttrainingm7t2r-1-2537408167-3348277322',
@ -742,18 +724,16 @@ export default {
parameters: [
{
name: 'output',
value: 'gs://yelsayed-2/xgboost'
value: 'gs://yelsayed-2/xgboost',
},
{
name: 'project',
value: 'yelsayed-2'
}
]
value: 'yelsayed-2',
},
],
},
children: [
'job-xgboosttrainingm7t2r-1-2537408167-294182655'
]
}
}
}
children: ['job-xgboosttrainingm7t2r-1-2537408167-294182655'],
},
},
},
};

@ -44,8 +44,8 @@
"build": "npm run lint && react-scripts-ts build",
"docker": "COMMIT_HASH=`git rev-parse HEAD`; docker build -q -t ml-pipelines-frontend:${COMMIT_HASH} --build-arg COMMIT_HASH=${COMMIT_HASH} --build-arg DATE=\"`date -u`\" -f Dockerfile ..",
"eject": "react-scripts-ts eject",
"format": "prettier --write 'src/**/*.{ts,tsx}'",
"format:check": "prettier --check 'src/**/*.{ts,tsx}' || node ./scripts/check-format-error-info.js",
"format": "prettier --write './**/*.{ts,tsx}'",
"format:check": "prettier --check './**/*.{ts,tsx}' || node ./scripts/check-format-error-info.js",
"java": "java -version",
"lint": "tslint -c ./tslint.prod.json -p .",
"mock:api": "ts-node-dev -O '{\"module\": \"commonjs\"}' mock-backend/mock-api-server.ts 3001",

@ -15,74 +15,71 @@ import fetch from 'node-fetch';
/** IAWSMetadataCredentials describes the credentials provided by aws metadata store. */
export interface IAWSMetadataCredentials {
Code: string;
LastUpdated: string;
Type: string;
AccessKeyId: string;
SecretAccessKey: string;
Token: string;
Expiration: string;
Code: string;
LastUpdated: string;
Type: string;
AccessKeyId: string;
SecretAccessKey: string;
Token: string;
Expiration: string;
}
/** url for aws metadata store. */
const metadataUrl = "http://169.254.169.254/latest/meta-data/";
const metadataUrl = 'http://169.254.169.254/latest/meta-data/';
/**
* Get the AWS IAM instance profile.
*/
async function getIAMInstanceProfile() : Promise<string|undefined> {
try {
const resp = await fetch(`${metadataUrl}/iam/security-credentials/`);
const profiles = (await resp.text()).split('\n');
if (profiles.length > 0) {
return profiles[0].trim(); // return first profile
}
return;
} catch (error) {
console.error(`Unable to fetch credentials from AWS metadata store: ${error}`)
return;
async function getIAMInstanceProfile(): Promise<string | undefined> {
try {
const resp = await fetch(`${metadataUrl}/iam/security-credentials/`);
const profiles = (await resp.text()).split('\n');
if (profiles.length > 0) {
return profiles[0].trim(); // return first profile
}
return;
} catch (error) {
console.error(`Unable to fetch credentials from AWS metadata store: ${error}`);
return;
}
}
/**
* Class to handle the session credentials for AWS ec2 instance profile.
*/
class AWSInstanceProfileCredentials {
_iamProfilePromise = getIAMInstanceProfile();
_credentials?: IAWSMetadataCredentials;
_expiration: number = 0;
_iamProfilePromise = getIAMInstanceProfile();
_credentials?: IAWSMetadataCredentials;
_expiration: number = 0;
async ok() {
return !!(await this._iamProfilePromise);
async ok() {
return !!(await this._iamProfilePromise);
}
async _fetchCredentials(): Promise<IAWSMetadataCredentials | undefined> {
try {
const profile = await this._iamProfilePromise;
const resp = await fetch(`${metadataUrl}/iam/security-credentials/${profile}`);
return resp.json();
} catch (error) {
console.error(`Unable to fetch credentials from AWS metadata store:${error}`);
return;
}
}
async _fetchCredentials(): Promise<IAWSMetadataCredentials|undefined> {
try {
const profile = await this._iamProfilePromise;
const resp = await fetch(`${metadataUrl}/iam/security-credentials/${profile}`)
return resp.json();
} catch (error) {
console.error(`Unable to fetch credentials from AWS metadata store:${error}`)
return;
}
/**
* Get the AWS metadata store session credentials.
*/
async getCredentials(): Promise<IAWSMetadataCredentials> {
// query for credentials if going to expire or no credentials yet
if (Date.now() + 10 >= this._expiration || !this._credentials) {
this._credentials = await this._fetchCredentials();
if (this._credentials.Expiration)
this._expiration = new Date(this._credentials.Expiration).getTime();
else this._expiration = -1; // always expire
}
/**
* Get the AWS metadata store session credentials.
*/
async getCredentials(): Promise<IAWSMetadataCredentials> {
// query for credentials if going to expire or no credentials yet
if ((Date.now() + 10 >= this._expiration) || !this._credentials) {
this._credentials = await this._fetchCredentials();
if (this._credentials.Expiration)
this._expiration = new Date(this._credentials.Expiration).getTime();
else
this._expiration = -1; // always expire
}
return this._credentials
}
return this._credentials;
}
}
export const awsInstanceProfileCredentials = new AWSInstanceProfileCredentials();
export const awsInstanceProfileCredentials = new AWSInstanceProfileCredentials();
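
The file above exposes a single shared credentials instance. For context, it is consumed roughly as follows, in line with the createMinioClient change later in this commit (fetchS3Credentials is an illustrative helper name, not part of the codebase):

import { awsInstanceProfileCredentials } from './aws-helper';

// Returns minio-style credentials when an EC2 instance profile is attached, otherwise undefined.
async function fetchS3Credentials() {
  if (await awsInstanceProfileCredentials.ok()) {
    const credentials = await awsInstanceProfileCredentials.getCredentials();
    if (credentials) {
      const { AccessKeyId: accessKey, SecretAccessKey: secretKey, Token: sessionToken } = credentials;
      return { accessKey, secretKey, sessionToken };
    }
  }
  return undefined;
}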

@ -13,11 +13,16 @@
// limitations under the License.
// @ts-ignore
import {Core_v1Api, Custom_objectsApi, KubeConfig, V1ConfigMapKeySelector} from '@kubernetes/client-node';
import {
Core_v1Api,
Custom_objectsApi,
KubeConfig,
V1ConfigMapKeySelector,
} from '@kubernetes/client-node';
import * as crypto from 'crypto-js';
import * as fs from 'fs';
import * as Utils from './utils';
import {IPartialArgoWorkflow} from './workflow-helper';
import { IPartialArgoWorkflow } from './workflow-helper';
// If this is running inside a k8s Pod, its namespace should be written at this
// path, this is also how we can tell whether we're running in the cluster.
@ -32,34 +37,42 @@ const viewerVersion = 'v1beta1';
const viewerPlural = 'viewers';
// Constants for argo workflow
const workflowGroup = 'argoproj.io'
const workflowVersion = 'v1alpha1'
const workflowPlural = 'workflows'
const workflowGroup = 'argoproj.io';
const workflowVersion = 'v1alpha1';
const workflowPlural = 'workflows';
/** Default pod template spec used to create tensorboard viewer. */
export const defaultPodTemplateSpec = {
spec: {
containers: [{
env: [{
name: "GOOGLE_APPLICATION_CREDENTIALS",
value: "/secret/gcp-credentials/user-gcp-sa.json"
}],
volumeMounts: [{
name: "gcp-credentials",
mountPath: "/secret/gcp-credentials/user-gcp-sa.json",
readOnly: true
}]
}],
volumes: [{
name: "gcp-credentials",
volumeSource: {
secret: {
secretName: "user-gcp-sa"
}
}
}]
}
}
containers: [
{
env: [
{
name: 'GOOGLE_APPLICATION_CREDENTIALS',
value: '/secret/gcp-credentials/user-gcp-sa.json',
},
],
volumeMounts: [
{
name: 'gcp-credentials',
mountPath: '/secret/gcp-credentials/user-gcp-sa.json',
readOnly: true,
},
],
},
],
volumes: [
{
name: 'gcp-credentials',
volumeSource: {
secret: {
secretName: 'user-gcp-sa',
},
},
},
],
},
};
export const isInCluster = fs.existsSync(namespaceFilePath);
@ -80,7 +93,10 @@ function getNameOfViewerResource(logdir: string): string {
* Create Tensorboard instance via CRD with the given logdir if there is no
* existing Tensorboard instance.
*/
export async function newTensorboardInstance(logdir: string, podTemplateSpec: Object = defaultPodTemplateSpec): Promise<void> {
export async function newTensorboardInstance(
logdir: string,
podTemplateSpec: Object = defaultPodTemplateSpec,
): Promise<void> {
if (!k8sV1CustomObjectClient) {
throw new Error('Cannot access kubernetes Custom Object API');
}
@ -103,11 +119,16 @@ export async function newTensorboardInstance(logdir: string, podTemplateSpec: Ob
// TODO(jingzhang36): tensorflow image version read from input textbox.
tensorflowImage: 'tensorflow/tensorflow:1.13.2',
},
podTemplateSpec
}
podTemplateSpec,
},
};
await k8sV1CustomObjectClient.createNamespacedCustomObject(viewerGroup,
viewerVersion, namespace, viewerPlural, body);
await k8sV1CustomObjectClient.createNamespacedCustomObject(
viewerGroup,
viewerVersion,
namespace,
viewerPlural,
body,
);
}
/**
@ -119,20 +140,28 @@ export async function getTensorboardInstance(logdir: string): Promise<string> {
throw new Error('Cannot access kubernetes Custom Object API');
}
return await (k8sV1CustomObjectClient.getNamespacedCustomObject(
viewerGroup, viewerVersion, namespace, viewerPlural,
getNameOfViewerResource(logdir))).then(
return await k8sV1CustomObjectClient
.getNamespacedCustomObject(
viewerGroup,
viewerVersion,
namespace,
viewerPlural,
getNameOfViewerResource(logdir),
)
.then(
// Viewer CRD pod has tensorboard instance running at port 6006 while
// viewer CRD service has tensorboard instance running at port 80. Since
// we return service address here (instead of pod address), so use 80.
(viewer: any) => (
viewer && viewer.body &&
(viewer: any) =>
viewer &&
viewer.body &&
viewer.body.spec.tensorboardSpec.logDir == logdir &&
viewer.body.spec.type == 'tensorboard') ?
`http://${viewer.body.metadata.name}-service.${namespace}.svc.cluster.local:80/tensorboard/${viewer.body.metadata.name}/` : '',
viewer.body.spec.type == 'tensorboard'
? `http://${viewer.body.metadata.name}-service.${namespace}.svc.cluster.local:80/tensorboard/${viewer.body.metadata.name}/`
: '',
// No existing custom object with the given name, i.e., no existing
// tensorboard instance.
(error: any) => ''
(error: any) => '',
);
}
@ -159,11 +188,12 @@ export function getPodLogs(podName: string): Promise<string> {
if (!k8sV1Client) {
throw new Error('Cannot access kubernetes API');
}
return (k8sV1Client.readNamespacedPodLog(podName, namespace, 'main') as any)
.then(
(response: any) => (response && response.body) ? response.body.toString() : '',
(error: any) => {throw new Error(JSON.stringify(error.body));}
);
return (k8sV1Client.readNamespacedPodLog(podName, namespace, 'main') as any).then(
(response: any) => (response && response.body ? response.body.toString() : ''),
(error: any) => {
throw new Error(JSON.stringify(error.body));
},
);
}
/**
@ -176,7 +206,12 @@ export async function getArgoWorkflow(workflowName: string): Promise<IPartialArg
}
const res = await k8sV1CustomObjectClient.getNamespacedCustomObject(
workflowGroup, workflowVersion, namespace, workflowPlural, workflowName)
workflowGroup,
workflowVersion,
namespace,
workflowPlural,
workflowName,
);
if (res.response.statusCode >= 400) {
throw new Error(`Unable to query workflow:${workflowName}: Access denied.`);
@ -198,4 +233,4 @@ export async function getK8sSecret(name: string, key: string) {
const secretb64 = k8sSecret.body.data[key];
const buff = new Buffer(secretb64, 'base64');
return buff.toString('ascii');
}
}
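
For context, the viewer helpers reformatted above are typically chained inside an async request handler roughly like this (the wrapper function is illustrative, not part of this commit):

import { getTensorboardInstance, newTensorboardInstance } from './k8s-helper';

// Reuse the Tensorboard viewer CRD that already points at this logdir, creating one if none exists yet.
async function ensureTensorboard(logdir: string): Promise<string> {
  let address = await getTensorboardInstance(logdir);
  if (!address) {
    await newTensorboardInstance(logdir);
    address = await getTensorboardInstance(logdir);
  }
  return address;
}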

@ -11,17 +11,16 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {Stream} from 'stream';
import { Stream } from 'stream';
import * as tar from 'tar';
import {Client as MinioClient, ClientOptions as MinioClientOptions} from 'minio';
import {awsInstanceProfileCredentials} from './aws-helper';
import { Client as MinioClient, ClientOptions as MinioClientOptions } from 'minio';
import { awsInstanceProfileCredentials } from './aws-helper';
/** IMinioRequestConfig describes the info required to retrieve an artifact. */
export interface IMinioRequestConfig {
bucket: string;
key: string;
client: MinioClient;
bucket: string;
key: string;
client: MinioClient;
}
/** IMinioClientOptionsWithOptionalSecrets wraps around MinioClientOptions where only endPoint is required (accesskey and secretkey are optional). */
@ -34,38 +33,41 @@ export interface IMinioClientOptionsWithOptionalSecrets extends Partial<MinioCli
* @param config minio client options where `accessKey` and `secretKey` are optional.
*/
export async function createMinioClient(config: IMinioClientOptionsWithOptionalSecrets) {
if (!config.accessKey || !config.secretKey) {
if (await awsInstanceProfileCredentials.ok()) {
const credentials = await awsInstanceProfileCredentials.getCredentials();
if (credentials) {
const {AccessKeyId: accessKey, SecretAccessKey: secretKey, Token: sessionToken} = credentials;
return new MinioClient({...config, accessKey, secretKey, sessionToken});
}
console.error('unable to get credentials from AWS metadata store.')
}
}
return new MinioClient(config as MinioClientOptions);
}
export function getTarObjectAsString({bucket, key, client}: IMinioRequestConfig) {
return new Promise<string>(async (resolve, reject) => {
try {
const stream = await getObjectStream({bucket, key, client});
let contents = '';
stream.pipe(new tar.Parse()).on('entry', (entry: Stream) => {
entry.on('data', (buffer) => contents += buffer.toString());
});
stream.on('end', () => {
resolve(contents);
});
} catch (err) {
reject(err);
if (!config.accessKey || !config.secretKey) {
if (await awsInstanceProfileCredentials.ok()) {
const credentials = await awsInstanceProfileCredentials.getCredentials();
if (credentials) {
const {
AccessKeyId: accessKey,
SecretAccessKey: secretKey,
Token: sessionToken,
} = credentials;
return new MinioClient({ ...config, accessKey, secretKey, sessionToken });
}
});
console.error('unable to get credentials from AWS metadata store.');
}
}
return new MinioClient(config as MinioClientOptions);
}
export function getObjectStream({bucket, key, client}: IMinioRequestConfig) {
export function getTarObjectAsString({ bucket, key, client }: IMinioRequestConfig) {
return new Promise<string>(async (resolve, reject) => {
try {
const stream = await getObjectStream({ bucket, key, client });
let contents = '';
stream.pipe(new tar.Parse()).on('entry', (entry: Stream) => {
entry.on('data', buffer => (contents += buffer.toString()));
});
stream.on('end', () => {
resolve(contents);
});
} catch (err) {
reject(err);
}
});
}
export function getObjectStream({ bucket, key, client }: IMinioRequestConfig) {
return client.getObject(bucket, key);
}
}

@ -22,17 +22,14 @@ export function _extractUrlFromReferer(proxyPrefix: string, referer = ''): strin
}
export function _trimProxyPrefix(proxyPrefix: string, path: string): string {
return path.indexOf(proxyPrefix) === 0 ?
path = path.substr(proxyPrefix.length) :
path;
return path.indexOf(proxyPrefix) === 0 ? (path = path.substr(proxyPrefix.length)) : path;
}
export function _routePathWithReferer(proxyPrefix: string, path: string, referer = ''): string {
// If a referer header is included, extract the referer URL, otherwise
// just trim out the /_proxy/ prefix. Use the origin of the resulting URL.
const proxiedUrlInReferer = _extractUrlFromReferer(proxyPrefix, referer);
let decodedPath =
decodeURIComponent(proxiedUrlInReferer || _trimProxyPrefix(proxyPrefix, path));
let decodedPath = decodeURIComponent(proxiedUrlInReferer || _trimProxyPrefix(proxyPrefix, path));
if (!decodedPath.startsWith('http://') && !decodedPath.startsWith('https://')) {
decodedPath = 'http://' + decodedPath;
}
@ -48,7 +45,6 @@ export function _rewritePath(proxyPrefix: string, path: string, query: string):
}
export default (app: express.Application, apisPrefix: string) => {
const proxyPrefix = apisPrefix + '/_proxy/';
app.use((req, _, next) => {
@ -58,7 +54,8 @@ export default (app: express.Application, apisPrefix: string) => {
const refererUrl = _extractUrlFromReferer(proxyPrefix, req.headers.referer as string);
if (refererUrl && req.url.indexOf(proxyPrefix) !== 0) {
let proxiedUrl = decodeURIComponent(
_extractUrlFromReferer(proxyPrefix, req.headers.referer as string));
_extractUrlFromReferer(proxyPrefix, req.headers.referer as string),
);
if (!proxiedUrl.startsWith('http://') && !proxiedUrl.startsWith('https://')) {
proxiedUrl = 'http://' + proxiedUrl;
}
@ -69,18 +66,20 @@ export default (app: express.Application, apisPrefix: string) => {
next();
});
app.all(proxyPrefix + '*', proxy({
changeOrigin: true,
logLevel: 'debug',
target: 'http://127.0.0.1',
app.all(
proxyPrefix + '*',
proxy({
changeOrigin: true,
logLevel: 'debug',
target: 'http://127.0.0.1',
router: (req: any) => {
return _routePathWithReferer(proxyPrefix, req.path, req.headers.referer as string);
},
pathRewrite: (_, req: any) => {
return _rewritePath(proxyPrefix, req.path, req.query);
},
}));
router: (req: any) => {
return _routePathWithReferer(proxyPrefix, req.path, req.headers.referer as string);
},
pathRewrite: (_, req: any) => {
return _rewritePath(proxyPrefix, req.path, req.query);
},
}),
);
};

@ -13,10 +13,10 @@
// limitations under the License.
import * as express from 'express';
import {Application, static as StaticHandler} from 'express';
import { Application, static as StaticHandler } from 'express';
import * as fs from 'fs';
import * as proxy from 'http-proxy-middleware';
import {Client as MinioClient, ClientOptions as MinioClientOptions} from 'minio';
import { Client as MinioClient, ClientOptions as MinioClientOptions } from 'minio';
import fetch from 'node-fetch';
import * as path from 'path';
import * as process from 'process';
@ -24,11 +24,11 @@ import * as process from 'process';
import * as k8sHelper from './k8s-helper';
import podLogsHandler from './workflow-helper';
import proxyMiddleware from './proxy-middleware';
import {getTarObjectAsString, getObjectStream, createMinioClient} from './minio-helper';
import {Storage} from '@google-cloud/storage';
import {Stream} from 'stream';
import { getTarObjectAsString, getObjectStream, createMinioClient } from './minio-helper';
import { Storage } from '@google-cloud/storage';
import { Stream } from 'stream';
import {loadJSON} from './utils';
import { loadJSON } from './utils';
const BASEPATH = '/pipeline';
@ -63,7 +63,7 @@ const {
/** Envoy service will listen to this port */
METADATA_ENVOY_SERVICE_SERVICE_PORT = '9090',
/** Is Argo log archive enabled? */
ARGO_ARCHIVE_LOGS = "false",
ARGO_ARCHIVE_LOGS = 'false',
/** Use minio or s3 client to retrieve archives. */
ARGO_ARCHIVE_ARTIFACTORY = 'minio',
/** Bucket to retrieve logs from */
@ -77,16 +77,16 @@ enum Deployments {
KUBEFLOW = 'KUBEFLOW',
}
const DEPLOYMENT = process.env.DEPLOYMENT === 'KUBEFLOW' ?
Deployments.KUBEFLOW :
Deployments.NOT_SPECIFIED;
const DEPLOYMENT =
process.env.DEPLOYMENT === 'KUBEFLOW' ? Deployments.KUBEFLOW : Deployments.NOT_SPECIFIED;
console.log(`Deployment = ${DEPLOYMENT}`);
/** construct minio endpoint from host and namespace (optional) */
const MINIO_ENDPOINT = MINIO_NAMESPACE && MINIO_NAMESPACE.length > 0 ? `${MINIO_HOST}.${MINIO_NAMESPACE}` : MINIO_HOST;
const MINIO_ENDPOINT =
MINIO_NAMESPACE && MINIO_NAMESPACE.length > 0 ? `${MINIO_HOST}.${MINIO_NAMESPACE}` : MINIO_HOST;
/** converts string to bool */
const _as_bool = (value: string) => ['true', '1'].indexOf(value.toLowerCase()) >= 0
const _as_bool = (value: string) => ['true', '1'].indexOf(value.toLowerCase()) >= 0;
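For example, with this helper (values are illustrative):

_as_bool('true'); // true
_as_bool('1'); // true
_as_bool('False'); // false — anything other than 'true' or '1' (case-insensitive) is false
// Similarly, MINIO_ENDPOINT resolves to 'minio-service.kubeflow' when MINIO_HOST is 'minio-service'
// and MINIO_NAMESPACE is 'kubeflow' (hypothetical values).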
/** minio client for minio storage */
const minioOptions: MinioClientOptions = {
@ -108,21 +108,23 @@ const s3Options: MinioClientOptions = {
const s3ClientPromise = () => createMinioClient(s3Options);
/** pod template spec to use for viewer crd */
const podTemplateSpec = loadJSON(VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH, k8sHelper.defaultPodTemplateSpec)
const podTemplateSpec = loadJSON(
VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH,
k8sHelper.defaultPodTemplateSpec,
);
/** set a fallback query to an s3 or minio endpoint for the pod logs. */
if (_as_bool(ARGO_ARCHIVE_LOGS)) {
podLogsHandler.setFallbackHandler(
ARGO_ARCHIVE_ARTIFACTORY==='minio' ? minioOptions : s3Options,
ARGO_ARCHIVE_ARTIFACTORY === 'minio' ? minioOptions : s3Options,
ARGO_ARCHIVE_BUCKETNAME,
ARGO_ARCHIVE_PREFIX,
)
);
}
const app = express() as Application;
app.use(function (req, _, next) {
app.use(function(req, _, next) {
console.info(req.method + ' ' + req.originalUrl);
next();
});
@ -141,14 +143,16 @@ const buildDatePath = path.join(currentDir, 'BUILD_DATE');
const commitHashPath = path.join(currentDir, 'COMMIT_HASH');
const staticDir = path.resolve(process.argv[2]);
const buildDate =
fs.existsSync(buildDatePath) ? fs.readFileSync(buildDatePath, 'utf-8').trim() : '';
const commitHash =
fs.existsSync(commitHashPath) ? fs.readFileSync(commitHashPath, 'utf-8').trim() : '';
const buildDate = fs.existsSync(buildDatePath)
? fs.readFileSync(buildDatePath, 'utf-8').trim()
: '';
const commitHash = fs.existsSync(commitHashPath)
? fs.readFileSync(commitHashPath, 'utf-8').trim()
: '';
const port = process.argv[3] || 3000;
const apiServerAddress = `http://${ML_PIPELINE_SERVICE_HOST}:${ML_PIPELINE_SERVICE_PORT}`;
const envoyServiceAddress = `http://${METADATA_ENVOY_SERVICE_SERVICE_HOST}:${METADATA_ENVOY_SERVICE_SERVICE_PORT}`
const envoyServiceAddress = `http://${METADATA_ENVOY_SERVICE_SERVICE_HOST}:${METADATA_ENVOY_SERVICE_SERVICE_PORT}`;
const v1beta1Prefix = 'apis/v1beta1';
@ -161,8 +165,7 @@ const healthzStats = {
const healthzHandler = async (_, res) => {
try {
const response = await fetch(
`${apiServerAddress}/${v1beta1Prefix}/healthz`, { timeout: 1000 });
const response = await fetch(`${apiServerAddress}/${v1beta1Prefix}/healthz`, { timeout: 1000 });
healthzStats.apiServerReady = true;
const serverStatus = await response.json();
healthzStats.apiServerCommitHash = serverStatus.commit_sha;
@ -199,12 +202,19 @@ const artifactsHandler = async (req, res) => {
const storage = new Storage();
const prefix = key.indexOf('*') > -1 ? key.substr(0, key.indexOf('*')) : key;
const files = await storage.bucket(bucket).getFiles({ prefix });
const matchingFiles = files[0].filter((f) => {
const matchingFiles = files[0].filter(f => {
// Escape regex characters
const escapeRegexChars = (s: string) => s.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&');
// Build a RegExp object that only recognizes asterisks ('*'), and
// escapes everything else.
const regex = new RegExp('^' + key.split(/\*+/).map(escapeRegexChars).join('.*') + '$');
const regex = new RegExp(
'^' +
key
.split(/\*+/)
.map(escapeRegexChars)
.join('.*') +
'$',
);
return regex.test(f.name);
});
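To make the wildcard matching concrete, a hypothetical key and the pattern it produces:

// key = 'runs/*/metrics.csv'
// key.split(/\*+/) -> ['runs/', '/metrics.csv']
// after escaping and joining with '.*' -> '^runs/.*/metrics\.csv$'
// so 'runs/step-1/metrics.csv' matches, while 'runs/metrics.csv' does not.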
@ -218,9 +228,12 @@ const artifactsHandler = async (req, res) => {
matchingFiles.forEach((f, i) => {
const buffer: Buffer[] = [];
f.createReadStream()
.on('data', (data) => buffer.push(Buffer.from(data)))
.on('data', data => buffer.push(Buffer.from(data)))
.on('end', () => {
contents += Buffer.concat(buffer).toString().trim() + '\n';
contents +=
Buffer.concat(buffer)
.toString()
.trim() + '\n';
if (i === matchingFiles.length - 1) {
res.send(contents);
}
@ -234,7 +247,7 @@ const artifactsHandler = async (req, res) => {
case 'minio':
try {
res.send(await getTarObjectAsString({bucket, key, client: minioClient}));
res.send(await getTarObjectAsString({ bucket, key, client: minioClient }));
} catch (err) {
res.status(500).send(`Failed to get object in bucket ${bucket} at path ${key}: ${err}`);
}
@ -242,9 +255,11 @@ const artifactsHandler = async (req, res) => {
case 's3':
try {
const stream = await getObjectStream({bucket, key, client: await s3ClientPromise()});
const stream = await getObjectStream({ bucket, key, client: await s3ClientPromise() });
stream.on('end', () => res.end());
stream.on('error', err => res.status(500).send(`Failed to get object in bucket ${bucket} at path ${key}: ${err}`))
stream.on('error', err =>
res.status(500).send(`Failed to get object in bucket ${bucket} at path ${key}: ${err}`),
);
stream.pipe(res);
} catch (err) {
res.send(`Failed to get object in bucket ${bucket} at path ${key}: ${err}`);
@ -260,7 +275,8 @@ const artifactsHandler = async (req, res) => {
// add authorization header to fetch request if key is non-empty
if (HTTP_AUTHORIZATION_KEY.length > 0) {
// inject original request's value if exists, otherwise default to provided default value
headers[HTTP_AUTHORIZATION_KEY] = req.headers[HTTP_AUTHORIZATION_KEY] || HTTP_AUTHORIZATION_DEFAULT_VALUE;
headers[HTTP_AUTHORIZATION_KEY] =
req.headers[HTTP_AUTHORIZATION_KEY] || HTTP_AUTHORIZATION_DEFAULT_VALUE;
}
const response = await fetch(`${source}://${baseUrl}${bucket}/${key}`, { headers: headers });
const content = await response.buffer();
@ -325,7 +341,7 @@ const logsHandler = async (req, res) => {
try {
const stream = await podLogsHandler.getPodLogs(podName);
stream.on('error', (err) => res.status(500).send('Could not get main container logs: ' + err))
stream.on('error', err => res.status(500).send('Could not get main container logs: ' + err));
stream.on('end', () => res.end());
stream.pipe(res);
} catch (err) {
@ -336,16 +352,15 @@ const logsHandler = async (req, res) => {
const clusterNameHandler = async (req, res) => {
const response = await fetch(
'http://metadata/computeMetadata/v1/instance/attributes/cluster-name',
{ headers: {'Metadata-Flavor': 'Google' } }
{ headers: { 'Metadata-Flavor': 'Google' } },
);
res.send(await response.text());
};
const projectIdHandler = async (req, res) => {
const response = await fetch(
'http://metadata/computeMetadata/v1/project/project-id',
{ headers: {'Metadata-Flavor': 'Google' } }
);
const response = await fetch('http://metadata/computeMetadata/v1/project/project-id', {
headers: { 'Metadata-Flavor': 'Google' },
});
res.send(await response.text());
};
@ -378,45 +393,55 @@ app.get('/visualizations/allowed', allowCustomVisualizationsHandler);
app.get(BASEPATH + '/visualizations/allowed', allowCustomVisualizationsHandler);
// Proxy metadata requests to the Envoy instance which will handle routing to the metadata gRPC server
app.all('/ml_metadata.*', proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Metadata proxied request: ', (proxyReq as any).path);
},
target: envoyServiceAddress,
}));
app.all(
'/ml_metadata.*',
proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Metadata proxied request: ', (proxyReq as any).path);
},
target: envoyServiceAddress,
}),
);
// Order matters here, since both handlers can match any proxied request with a referer,
// and we prioritize the basepath-friendly handler
proxyMiddleware(app, BASEPATH + '/' + v1beta1Prefix);
proxyMiddleware(app, '/' + v1beta1Prefix);
app.all('/' + v1beta1Prefix + '/*', proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Proxied request: ', (proxyReq as any).path);
},
target: apiServerAddress,
}));
app.all(
'/' + v1beta1Prefix + '/*',
proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Proxied request: ', (proxyReq as any).path);
},
target: apiServerAddress,
}),
);
app.all(BASEPATH + '/' + v1beta1Prefix + '/*', proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Proxied request: ', (proxyReq as any).path);
},
pathRewrite: (path) =>
path.startsWith(BASEPATH) ? path.substr(BASEPATH.length, path.length) : path,
target: apiServerAddress,
}));
app.all(
BASEPATH + '/' + v1beta1Prefix + '/*',
proxy({
changeOrigin: true,
onProxyReq: proxyReq => {
console.log('Proxied request: ', (proxyReq as any).path);
},
pathRewrite: path =>
path.startsWith(BASEPATH) ? path.substr(BASEPATH.length, path.length) : path,
target: apiServerAddress,
}),
);
const DEFAULT_FLAG = 'window.KFP_FLAGS.DEPLOYMENT=null';
const KUBEFLOW_CLIENT_PLACEHOLDER = '<script id="kubeflow-client-placeholder"></script>';
function replaceRuntimeContent(indexHtml: string): string {
if (DEPLOYMENT === Deployments.KUBEFLOW) {
return indexHtml.replace(DEFAULT_FLAG, 'window.KFP_FLAGS.DEPLOYMENT="KUBEFLOW"')
return indexHtml
.replace(DEFAULT_FLAG, 'window.KFP_FLAGS.DEPLOYMENT="KUBEFLOW"')
.replace(
KUBEFLOW_CLIENT_PLACEHOLDER,
`<script id="kubeflow-client-placeholder" src="/dashboard_lib.bundle.js"></script>`
`<script id="kubeflow-client-placeholder" src="/dashboard_lib.bundle.js"></script>`,
);
} else {
return indexHtml;
@ -433,10 +458,14 @@ fs.readFile(path.resolve(staticDir, 'index.html'), (err, data) => {
indexHtml = data.toString();
// sanity checking
if (!indexHtml.includes(DEFAULT_FLAG)) {
throw new Error(`Error: cannot find default flag: '${DEFAULT_FLAG}' in index html. Its content: '${indexHtml}'.`);
throw new Error(
`Error: cannot find default flag: '${DEFAULT_FLAG}' in index html. Its content: '${indexHtml}'.`,
);
}
if (!indexHtml.includes(KUBEFLOW_CLIENT_PLACEHOLDER)) {
throw new Error(`Error: cannot find kubeflow client placeholder: '${KUBEFLOW_CLIENT_PLACEHOLDER}' in index html. Its content: '${indexHtml}'.`)
throw new Error(
`Error: cannot find kubeflow client placeholder: '${KUBEFLOW_CLIENT_PLACEHOLDER}' in index html. Its content: '${indexHtml}'.`,
);
}
}
});
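For illustration, in a KUBEFLOW deployment replaceRuntimeContent rewrites the two markers in index.html as follows:

// before: window.KFP_FLAGS.DEPLOYMENT=null
//         <script id="kubeflow-client-placeholder"></script>
// after:  window.KFP_FLAGS.DEPLOYMENT="KUBEFLOW"
//         <script id="kubeflow-client-placeholder" src="/dashboard_lib.bundle.js"></script>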

View File

@ -11,7 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {readFileSync} from 'fs';
import { readFileSync } from 'fs';
export function equalArrays(a1: any[], a2: any[]): boolean {
if (!Array.isArray(a1) || !Array.isArray(a2) || a1.length !== a2.length) {
@ -37,8 +37,8 @@ export function generateRandomString(length: number): string {
export function loadJSON(filepath: string, defaultValue: Object = {}): Object {
if (!filepath) return defaultValue;
try {
return JSON.parse(readFileSync(filepath, "utf-8"))
return JSON.parse(readFileSync(filepath, 'utf-8'));
} catch (error) {
return defaultValue;
}
}
}
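A quick sketch of loadJSON in use; the path and default below are hypothetical:

// Returns the parsed file contents, or the default when the path is empty, missing, or not valid JSON.
const viewerTemplate = loadJSON('/config/viewer-pod-template.json', { spec: {} });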

View File

@ -1,4 +1,3 @@
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
@ -12,71 +11,70 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {PassThrough} from 'stream';
import {ClientOptions as MinioClientOptions} from 'minio';
import {getK8sSecret, getArgoWorkflow, getPodLogs} from './k8s-helper';
import {createMinioClient, IMinioRequestConfig, getObjectStream} from './minio-helper';
import { PassThrough } from 'stream';
import { ClientOptions as MinioClientOptions } from 'minio';
import { getK8sSecret, getArgoWorkflow, getPodLogs } from './k8s-helper';
import { createMinioClient, IMinioRequestConfig, getObjectStream } from './minio-helper';
export interface IPartialArgoWorkflow {
status: {
nodes?: IArgoWorkflowStatusNode
}
status: {
nodes?: IArgoWorkflowStatusNode;
};
}
export interface IArgoWorkflowStatusNode {
[key: string]: IArgoWorkflowStatusNodeInfo;
[key: string]: IArgoWorkflowStatusNodeInfo;
}
export interface IArgoWorkflowStatusNodeInfo {
outputs?: {
artifacts?: IArtifactRecord[]
}
outputs?: {
artifacts?: IArtifactRecord[];
};
}
export interface IArtifactRecord {
archiveLogs?: boolean;
name: string;
s3?: IS3Artifact;
archiveLogs?: boolean;
name: string;
s3?: IS3Artifact;
}
export interface IS3Artifact {
accessKeySecret?: ISecretSelector;
bucket: string;
endpoint: string;
insecure: boolean;
key: string;
secretKeySecret?: ISecretSelector;
accessKeySecret?: ISecretSelector;
bucket: string;
endpoint: string;
insecure: boolean;
key: string;
secretKeySecret?: ISecretSelector;
}
export interface ISecretSelector {
key: string;
name: string;
key: string;
name: string;
}
/**
* Returns the k8s access key and secret used to connect to the s3 artifactory.
* @param s3artifact s3artifact object describing the s3 artifactory config for argo workflow.
*/
async function getMinioClientSecrets({accessKeySecret, secretKeySecret}: IS3Artifact) {
if (!accessKeySecret || !secretKeySecret) {
return {}
}
const accessKey = await getK8sSecret(accessKeySecret.name, accessKeySecret.key);
const secretKey = await getK8sSecret(secretKeySecret.name, secretKeySecret.key);
return {accessKey, secretKey};
async function getMinioClientSecrets({ accessKeySecret, secretKeySecret }: IS3Artifact) {
if (!accessKeySecret || !secretKeySecret) {
return {};
}
const accessKey = await getK8sSecret(accessKeySecret.name, accessKeySecret.key);
const secretKey = await getK8sSecret(secretKeySecret.name, secretKeySecret.key);
return { accessKey, secretKey };
}
/**
* Split a uri into host and port.
* @param uri uri to split
* @param insecure if port is not provided in uri, return port depending on whether ssl is enabled.
*/
*/
function urlSplit(uri: string, insecure: boolean) {
let chunks = uri.split(":");
if (chunks.length==1)
return {host: chunks[0], port: !!insecure ? 80 : 443};
return {host: chunks[0], port: parseInt(chunks[1], 10)};
let chunks = uri.split(':');
if (chunks.length == 1) return { host: chunks[0], port: !!insecure ? 80 : 443 };
return { host: chunks[0], port: parseInt(chunks[1], 10) };
}
/**
@ -84,101 +82,107 @@ function urlSplit(uri: string, insecure: boolean) {
* @param podName name of the pod.
*/
function workflowNameFromPodName(podName: string) {
let chunks = podName.split("-");
chunks.pop();
return chunks.join("-");
let chunks = podName.split('-');
chunks.pop();
return chunks.join('-');
}
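For a made-up pod name, workflowNameFromPodName simply drops the trailing suffix:

workflowNameFromPodName('hello-world-bpscs-1345838759'); // -> 'hello-world-bpscs'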
export class PodLogsHandler {
fromConfig?: (podName: string) => Promise<IMinioRequestConfig>;
fromConfig?: (podName: string) => Promise<IMinioRequestConfig>;
async getPodLogs(podName: string) {
async getPodLogs(podName: string) {
try {
// retrieve from k8s
const stream = new PassThrough();
stream.end(await getPodLogs(podName));
console.log(`Getting logs for pod:${podName}.`);
return stream;
} catch (k8sError) {
console.error(`Unable to get logs for pod:${podName}: ${k8sError}`);
return this.getPodLogsFromArchive(podName);
}
}
async getPodLogsFromArchive(podName: string) {
try {
// try argo workflow crd status
const request = await this.fromWorkflow(podName);
const stream = await getObjectStream(request);
console.log(`Getting logs for pod:${podName} from ${request.bucket}/${request.key}.`);
return stream;
} catch (workflowError) {
if (!!this.fromConfig) {
try {
// retrieve from k8s
const stream = new PassThrough();
stream.end(await getPodLogs(podName));
console.log(`Getting logs for pod:${podName}.`)
return stream;
} catch (k8sError) {
console.error(`Unable to get logs for pod:${podName}: ${k8sError}`);
return this.getPodLogsFromArchive(podName);
const request = await this.fromConfig(podName);
const stream = await getObjectStream(request);
console.log(`Getting logs for pod:${podName} from ${request.bucket}/${request.key}.`);
return stream;
} catch (configError) {
console.error(`Unable to get logs for pod:${podName}: ${configError}`);
throw new Error(
`Unable to retrieve logs from ${podName}: ${workflowError}, ${configError}`,
);
}
}
console.error(`Unable to get logs for pod:${podName}: ${workflowError}`);
console.error(workflowError);
throw new Error(`Unable to retrieve logs from ${podName}: ${workflowError}`);
}
}
async getPodLogsFromArchive(podName: string) {
try {
// try argo workflow crd status
const request = await this.fromWorkflow(podName);
const stream = await getObjectStream(request);
console.log(`Getting logs for pod:${podName} from ${request.bucket}/${request.key}.`)
return stream;
} catch (workflowError) {
if (!!this.fromConfig) {
try {
const request = await this.fromConfig(podName);
const stream = await getObjectStream(request);
console.log(`Getting logs for pod:${podName} from ${request.bucket}/${request.key}.`)
return stream;
} catch (configError) {
console.error(`Unable to get logs for pod:${podName}: ${configError}`);
throw new Error(`Unable to retrieve logs from ${podName}: ${workflowError}, ${configError}`)
}
}
console.error(`Unable to get logs for pod:${podName}: ${workflowError}`);
console.error(workflowError);
throw new Error(`Unable to retrieve logs from ${podName}: ${workflowError}`)
}
}
setFallbackHandler(minioOptions: MinioClientOptions, bucket: string, prefix: string) {
const client = createMinioClient(minioOptions);
this.fromConfig = async function(podName: string): Promise<IMinioRequestConfig> {
const workflowName = workflowNameFromPodName(podName);
return {
bucket,
key: `${prefix}/${workflowName}/${podName}/main.log`,
client: await client,
};
};
return this;
}
setFallbackHandler(minioOptions: MinioClientOptions, bucket: string, prefix: string) {
const client = createMinioClient(minioOptions);
this.fromConfig = async function(podName: string): Promise<IMinioRequestConfig> {
const workflowName = workflowNameFromPodName(podName);
return {
bucket,
key: `${prefix}/${workflowName}/${podName}/main.log`,
client: await client
};
}
return this;
}
async fromWorkflow(podName: string): Promise<IMinioRequestConfig> {
const workflow = await getArgoWorkflow(workflowNameFromPodName(podName));
async fromWorkflow(podName: string): Promise<IMinioRequestConfig> {
const workflow = await getArgoWorkflow(workflowNameFromPodName(podName));
// check if required fields are available
if (!workflow.status || !workflow.status.nodes ||
!workflow.status.nodes[podName] ||
!workflow.status.nodes[podName].outputs ||
!workflow.status.nodes[podName].outputs.artifacts)
throw new Error('Unable to find pod info in workflow status to retrieve logs.')
const artifacts: IArtifactRecord[] = workflow.status.nodes[podName].outputs.artifacts;
const archiveLogs: IArtifactRecord[] = artifacts.filter((artifact: any) => artifact.archiveLogs)
if (archiveLogs.length === 0)
throw new Error('Unable to find pod log archive information from workflow status.')
const s3Artifact = archiveLogs[0].s3;
if (!s3Artifact)
throw new Error('Unable to find s3 artifact info from workflow status.')
const {host, port} = urlSplit(s3Artifact.endpoint, s3Artifact.insecure);
const {accessKey, secretKey} = await getMinioClientSecrets(s3Artifact);
const client = await createMinioClient({
endPoint: host,
port,
accessKey,
secretKey,
useSSL: !s3Artifact.insecure,
});
return {
bucket: s3Artifact.bucket,
key: s3Artifact.key,
client,
};
}
// check if required fields are available
if (
!workflow.status ||
!workflow.status.nodes ||
!workflow.status.nodes[podName] ||
!workflow.status.nodes[podName].outputs ||
!workflow.status.nodes[podName].outputs.artifacts
)
throw new Error('Unable to find pod info in workflow status to retrieve logs.');
const artifacts: IArtifactRecord[] = workflow.status.nodes[podName].outputs.artifacts;
const archiveLogs: IArtifactRecord[] = artifacts.filter(
(artifact: any) => artifact.archiveLogs,
);
if (archiveLogs.length === 0)
throw new Error('Unable to find pod log archive information from workflow status.');
const s3Artifact = archiveLogs[0].s3;
if (!s3Artifact) throw new Error('Unable to find s3 artifact info from workflow status.');
const { host, port } = urlSplit(s3Artifact.endpoint, s3Artifact.insecure);
const { accessKey, secretKey } = await getMinioClientSecrets(s3Artifact);
const client = await createMinioClient({
endPoint: host,
port,
accessKey,
secretKey,
useSSL: !s3Artifact.insecure,
});
return {
bucket: s3Artifact.bucket,
key: s3Artifact.key,
client,
};
}
}
const podLogsHandler = new PodLogsHandler();
export default podLogsHandler;
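A rough sketch of how this handler is wired and consumed elsewhere in the server; the minio options, bucket, and prefix are hypothetical:

// Optional archive fallback for pods that no longer exist in the cluster.
podLogsHandler.setFallbackHandler(
  { endPoint: 'minio-service', port: 9000, accessKey: 'minio', secretKey: 'minio123', useSSL: false },
  'mlpipeline', // bucket
  'logs', // prefix
);
// Inside an async express handler: tries the k8s API first, then the workflow status, then the fallback.
const stream = await podLogsHandler.getPodLogs('hello-world-bpscs-1345838759');
// stream.pipe(res);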

View File

@ -179,8 +179,8 @@ export interface Inputs {
* Pod metadata
*/
export interface Metadata {
annotations?: { [key: string]: string; };
labels?: { [key: string]: string; };
annotations?: { [key: string]: string };
labels?: { [key: string]: string };
}
/**
* Outputs hold parameters, artifacts, and results from a step
@ -564,7 +564,7 @@ export interface Template {
* run on the selected node(s). Overrides the selector set at the workflow
* level.
*/
nodeSelector?: { [key: string]: string; };
nodeSelector?: { [key: string]: string };
/**
* Outputs describe the parameters and artifacts that this template produces
*/
@ -747,7 +747,6 @@ export interface NodeStatus {
}
export interface WorkflowStatus {
/**
* Phase a simple, high-level summary of where the workflow is in its lifecycle.
*/
@ -825,7 +824,7 @@ export interface WorkflowSpec {
* to be scheduled on the selected node(s).
* This is able to be overridden by a nodeSelector specified in the template.
*/
nodeSelector?: { [key: string]: string; };
nodeSelector?: { [key: string]: string };
/**
* OnExit is a template reference which is invoked at the end of the
* workflow, irrespective of the success, failure, or error of the primary

View File

@ -26,7 +26,6 @@ export type SecurityContext = any;
export type PersistentVolumeClaim = any;
export type Affinity = any;
export interface VolumeMount {
name: string;
mountPath?: string;