Enhancement: Moving pagination, sorting, and filtering of the workflow runs table to the backend (#2829)

* added pagination for QueryWorkflowRuns

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* filtering workflowRuns based on workflowRunIDs

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* changed the API for getWorkflowRuns in frontend

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* added pagination for frontend and refactored code to accommodate the changes

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* Added Sorting and Filtering

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* sorting added from backend api call

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* filtering removed from frontend and used backend APIs to filter data

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* typed execution data in the backend and sent common metadata from execution data in workflow runs, reducing the data size in the frontend; sorting based on workflow run phase done in the backend

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* changing resiliency score to null in case of running workflows

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* WIP: filtering and sorting done, pagination remaining

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* pagination completed in database

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* reverted ID -> String changes

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* changed the sortStage

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* Added condition to check for no workflows

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Pagination bug fix (#1)

* bug fix trials #1

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* reverting local dev changes

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* fixed the workflow subscription bugs...EVERYTHING FINALLY WORKS

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* removed comments from config

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* resolved review comments: translations, formatting and removing binary file

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* fixed some bugs and added Execution data to types.go

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* go fmt project

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

Co-authored-by: SarthakJain26 <sarthak@chaosnative.com>
Arkajyoti Mukherjee 2021-06-07 11:05:45 +05:30 committed by GitHub
parent c77e5d24fe
commit aa5fe68456
26 changed files with 1741 additions and 640 deletions
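
In short, the frontend stops fetching every run and slicing, sorting, and filtering client-side; each view now sends a single workflowRunsInput and renders whatever page the resolver returns. A minimal sketch of the new call shape, assuming the types and import paths used in the BrowseWorkflows view below (the component name and the literal page, sort, and filter values are illustrative):

```tsx
import { useQuery } from '@apollo/client';
import React from 'react';
import { WORKFLOW_DETAILS } from '../../../graphql';
import {
  Workflow,
  WorkflowDataVars,
} from '../../../models/graphql/workflowData';
import { getProjectID } from '../../../utils/getSearchParams';

const WorkflowRunsCount: React.FC = () => {
  const projectID = getProjectID();

  // Ask the backend for page 0 (10 rows), newest first, names containing "podtato".
  // Paging, sorting and filtering all happen in the backend resolver now.
  const { data } = useQuery<Workflow, WorkflowDataVars>(WORKFLOW_DETAILS, {
    variables: {
      workflowRunsInput: {
        project_id: projectID,
        pagination: { page: 0, limit: 10 },
        sort: { field: 'Time', descending: true },
        filter: { workflow_name: 'podtato' },
      },
    },
    fetchPolicy: 'cache-and-network',
  });

  // total_no_of_workflow_runs drives the table pagination;
  // workflow_runs is only the current page of rows.
  return <>{data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0}</>;
};

export default WorkflowRunsCount;
```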

View File

@ -416,6 +416,7 @@ chaosWorkflows:
browseWorkflows: browseWorkflows:
status: Status status: Status
name: Name name: Name
dateFilterHelperText: Select a period
targetAgent: Target Agent targetAgent: Target Agent
reliabilityDetails: Reliability Details reliabilityDetails: Reliability Details
experiments: Experiments experiments: Experiments

View File

@ -1,17 +1,39 @@
import { gql } from '@apollo/client'; import { gql } from '@apollo/client';
export const WORKFLOW_DETAILS = gql` export const WORKFLOW_DETAILS_WITH_EXEC_DATA = gql`
query workflowDetails($projectID: String!) { query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkFlowRuns(project_id: $projectID) { getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_id workflow_id
workflow_name workflow_name
workflow_run_id workflow_run_id
execution_data
project_id
cluster_name cluster_name
last_updated last_updated
cluster_type
cluster_id cluster_id
phase
execution_data
resiliency_score
}
}
}
`;
export const WORKFLOW_DETAILS = gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_id
workflow_name
workflow_run_id
cluster_name
last_updated
phase
resiliency_score
experiments_passed
total_experiments
}
} }
} }
`; `;
@ -150,6 +172,14 @@ export const GET_CLUSTER_LENGTH = gql`
} }
`; `;
export const GET_CLUSTER_NAMES = gql`
query getClusters($project_id: String!) {
getCluster(project_id: $project_id) {
cluster_name
}
}
`;
export const ALL_USERS = gql` export const ALL_USERS = gql`
query allUsers { query allUsers {
users { users {
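
Because only one page of runs is in memory at a time, the cluster dropdown can no longer derive its options from the run list, so GET_CLUSTER_NAMES fetches the cluster names directly. A minimal usage sketch, assuming the Clusters/ClusterVars models referenced in the BrowseWorkflows changes below (ClusterFilter is an illustrative component name):

```tsx
import { useQuery } from '@apollo/client';
import { MenuItem, Select } from '@material-ui/core';
import React from 'react';
import { GET_CLUSTER_NAMES } from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import { getProjectID } from '../../../utils/getSearchParams';

const ClusterFilter: React.FC = () => {
  const [cluster, setCluster] = React.useState<string>('All');

  // Cluster names come from their own query instead of being derived
  // from the (now paginated) list of workflow runs.
  const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
    GET_CLUSTER_NAMES,
    { variables: { project_id: getProjectID() } }
  );

  return (
    <Select
      value={cluster}
      onChange={(event) => setCluster(event.target.value as string)}
    >
      <MenuItem value="All">All</MenuItem>
      {clusterList?.getCluster?.map((c) => (
        <MenuItem key={c.cluster_name} value={c.cluster_name}>
          {c.cluster_name}
        </MenuItem>
      ))}
    </Select>
  );
};

export default ClusterFilter;
```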

View File

@ -1,16 +1,33 @@
import { gql } from '@apollo/client'; import { gql } from '@apollo/client';
export const WORKFLOW_EVENTS_WITH_EXEC_DATA = gql`
subscription workflowEvents($projectID: String!) {
workflowEventListener(project_id: $projectID) {
workflow_id
workflow_name
workflow_run_id
cluster_name
last_updated
cluster_id
phase
execution_data
resiliency_score
}
}
`;
export const WORKFLOW_EVENTS = gql` export const WORKFLOW_EVENTS = gql`
subscription workflowEvents($projectID: String!) { subscription workflowEvents($projectID: String!) {
workflowEventListener(project_id: $projectID) { workflowEventListener(project_id: $projectID) {
workflow_id workflow_id
workflow_name workflow_name
workflow_run_id workflow_run_id
execution_data
project_id
cluster_name cluster_name
last_updated last_updated
cluster_id phase
resiliency_score
experiments_passed
total_experiments
} }
} }
`; `;
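
The subscription is split the same way as the query: WORKFLOW_EVENTS_WITH_EXEC_DATA keeps execution_data for the run-details page, while WORKFLOW_EVENTS carries only the metadata the table renders. A condensed sketch of merging an incoming event into the paginated cache, following the updateQuery pattern used in BrowseWorkflow.tsx below (the component name is illustrative, and the filter check applied there is omitted here for brevity):

```tsx
import { useQuery } from '@apollo/client';
import React, { useEffect } from 'react';
import { WORKFLOW_DETAILS, WORKFLOW_EVENTS } from '../../../graphql';
import {
  Workflow,
  WorkflowDataVars,
  WorkflowSubscription,
  WorkflowSubscriptionInput,
} from '../../../models/graphql/workflowData';
import { getProjectID } from '../../../utils/getSearchParams';

const LiveRunsCount: React.FC = () => {
  const projectID = getProjectID();
  const { subscribeToMore, data } = useQuery<Workflow, WorkflowDataVars>(
    WORKFLOW_DETAILS,
    { variables: { workflowRunsInput: { project_id: projectID } } }
  );

  useEffect(() => {
    // Patch incoming run events into the paginated result already held
    // in the Apollo cache: replace a known run, otherwise prepend it.
    subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
      document: WORKFLOW_EVENTS,
      variables: { projectID },
      updateQuery: (prev, { subscriptionData }) => {
        if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
          return prev;
        const runs = prev.getWorkflowRuns.workflow_runs.slice();
        const incoming = subscriptionData.data.workflowEventListener;
        const idx = runs.findIndex(
          (r) => r.workflow_run_id === incoming.workflow_run_id
        );
        let total = prev.getWorkflowRuns.total_no_of_workflow_runs;
        if (idx >= 0) {
          runs[idx] = incoming;
        } else {
          runs.unshift(incoming);
          total += 1;
        }
        return {
          getWorkflowRuns: {
            total_no_of_workflow_runs: total,
            workflow_runs: runs,
          },
        };
      },
    });
  }, [data]);

  return <>{data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0}</>;
};

export default LiveRunsCount;
```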

View File

@ -53,16 +53,67 @@ export interface WorkflowRun {
workflow_run_id: string; workflow_run_id: string;
cluster_type: string; cluster_type: string;
cluster_id: string; cluster_id: string;
phase: string;
resiliency_score?: number;
experiments_passed?: number;
total_experiments?: number;
}
interface GetWorkflowRunsOutput {
total_no_of_workflow_runs: number;
workflow_runs: WorkflowRun[];
} }
export interface Workflow { export interface Workflow {
getWorkFlowRuns: WorkflowRun[]; getWorkflowRuns: GetWorkflowRunsOutput;
} }
export interface WorkflowSubscription { export interface WorkflowSubscription {
workflowEventListener: WorkflowRun; workflowEventListener: WorkflowRun;
} }
export interface WorkflowDataVars { export interface WorkflowSubscriptionInput {
projectID: string; projectID: string;
} }
// Pagination
export interface Pagination {
page: number;
limit: number;
}
// Sort
export interface SortInput {
field: 'Name' | 'Time';
descending?: boolean;
}
// Filter
interface DateRange {
start_date: string;
end_date?: string;
}
export type WorkflowStatus =
| 'All'
| 'Failed'
| 'Running'
| 'Succeeded'
| undefined;
export interface WorkflowRunFilterInput {
workflow_name?: string;
cluster_name?: string;
workflow_status?: WorkflowStatus;
date_range?: DateRange;
}
export interface WorkflowDataVars {
workflowRunsInput: {
project_id: string;
workflow_run_ids?: string[];
pagination?: Pagination;
sort?: SortInput;
filter?: WorkflowRunFilterInput;
};
}
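
Filtering now travels to the backend as part of the query variables instead of being applied with Array.prototype.filter on the client. A small sketch of building a WorkflowRunFilterInput the way the new dateChange handler in BrowseWorkflow.tsx encodes it, with the date range as epoch-millisecond strings (the helper name and concrete values are illustrative):

```typescript
import {
  WorkflowRunFilterInput,
  WorkflowStatus,
} from '../../../models/graphql/workflowData';

// Illustrative helper: a filter for runs of a given name and status
// within one calendar day. Dates are sent as epoch-millisecond strings,
// matching how dateChange() in BrowseWorkflow.tsx encodes them.
const buildFilter = (
  name: string,
  status: WorkflowStatus,
  day: Date
): WorkflowRunFilterInput => ({
  workflow_name: name,
  cluster_name: 'All',
  workflow_status: status,
  date_range: {
    start_date: new Date(day).setHours(0, 0, 0).valueOf().toString(),
    end_date: new Date(day).setHours(23, 59, 59).valueOf().toString(),
  },
});

const filter = buildFilter('podtato-head', 'Failed', new Date('2021-06-01'));
```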

View File

@ -4,15 +4,15 @@ import Tabs from '@material-ui/core/Tabs/Tabs';
import React, { useEffect, useState } from 'react'; import React, { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom'; import { useParams } from 'react-router-dom';
import BackButton from '../../components/Button/BackButton'; import BackButton from '../../components/Button/BackButton';
import Loader from '../../components/Loader'; import Loader from '../../components/Loader';
import { StyledTab, TabPanel } from '../../components/Tabs'; import { StyledTab, TabPanel } from '../../components/Tabs';
import Scaffold from '../../containers/layouts/Scaffold'; import Scaffold from '../../containers/layouts/Scaffold';
import { import {
SCHEDULE_DETAILS, SCHEDULE_DETAILS,
WORKFLOW_DETAILS, WORKFLOW_DETAILS_WITH_EXEC_DATA,
WORKFLOW_EVENTS, WORKFLOW_EVENTS_WITH_EXEC_DATA,
} from '../../graphql'; } from '../../graphql';
import { import {
ScheduleDataVars, ScheduleDataVars,
@ -24,6 +24,7 @@ import {
Workflow, Workflow,
WorkflowDataVars, WorkflowDataVars,
WorkflowSubscription, WorkflowSubscription,
WorkflowSubscriptionInput,
} from '../../models/graphql/workflowData'; } from '../../models/graphql/workflowData';
import useActions from '../../redux/actions'; import useActions from '../../redux/actions';
import * as NodeSelectionActions from '../../redux/actions/nodeSelection'; import * as NodeSelectionActions from '../../redux/actions/nodeSelection';
@ -37,6 +38,10 @@ import WorkflowNodeInfo from '../../views/WorkflowDetails/WorkflowNodeInfo';
import NodeTable from '../../views/WorkflowDetails/WorkflowTable'; import NodeTable from '../../views/WorkflowDetails/WorkflowTable';
import useStyles from './styles'; import useStyles from './styles';
interface URLParams {
workflowRunId: string;
}
const WorkflowDetails: React.FC = () => { const WorkflowDetails: React.FC = () => {
const theme = useTheme(); const theme = useTheme();
const { t } = useTranslation(); const { t } = useTranslation();
@ -62,19 +67,23 @@ const WorkflowDetails: React.FC = () => {
const { pod_name } = useSelector((state: RootState) => state.selectedNode); const { pod_name } = useSelector((state: RootState) => state.selectedNode);
// Getting the workflow nome from the pathname const { workflowRunId }: URLParams = useParams();
const { pathname } = useLocation();
const workflowRunId = pathname.split('/')[2];
// Query to get workflows // Query to get workflows
const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>( const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>(
WORKFLOW_DETAILS, WORKFLOW_DETAILS_WITH_EXEC_DATA,
{ variables: { projectID } } {
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_run_ids: [workflowRunId],
},
},
fetchPolicy: 'cache-and-network',
}
); );
const workflow = data?.getWorkFlowRuns.filter( const workflow = data?.getWorkflowRuns.workflow_runs[0];
(w) => w.workflow_run_id === workflowRunId
)[0];
// Apollo query to get the scheduled data // Apollo query to get the scheduled data
const { data: SchedulesData, loading } = useQuery< const { data: SchedulesData, loading } = useQuery<
@ -87,34 +96,32 @@ const WorkflowDetails: React.FC = () => {
// Using subscription to get realtime data // Using subscription to get realtime data
useEffect(() => { useEffect(() => {
if ( if (workflow?.phase && workflow.phase === 'Running') {
workflow?.execution_data && subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
(JSON.parse(workflow?.execution_data) as ExecutionData).phase === document: WORKFLOW_EVENTS_WITH_EXEC_DATA,
'Running'
) {
subscribeToMore<WorkflowSubscription>({
document: WORKFLOW_EVENTS,
variables: { projectID }, variables: { projectID },
updateQuery: (prev, { subscriptionData }) => { updateQuery: (prev, { subscriptionData }) => {
if (!subscriptionData.data) return prev; if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
const modifiedWorkflows = prev.getWorkFlowRuns.slice(); return prev;
const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
const newWorkflow = subscriptionData.data.workflowEventListener; const newWorkflow = subscriptionData.data.workflowEventListener;
// Updating the query data // Update only the required workflowRun
let i = 0;
for (; i < modifiedWorkflows.length; i++) {
if ( if (
modifiedWorkflows[i].workflow_run_id === modifiedWorkflows[0].workflow_run_id === newWorkflow.workflow_run_id
newWorkflow.workflow_run_id )
) { modifiedWorkflows[0] = newWorkflow;
modifiedWorkflows[i] = newWorkflow;
break;
}
}
if (i === modifiedWorkflows.length)
modifiedWorkflows.unshift(newWorkflow);
return { ...prev, getWorkFlowRuns: modifiedWorkflows }; const totalNoOfWorkflows =
prev.getWorkflowRuns.total_no_of_workflow_runs;
return {
getWorkflowRuns: {
total_no_of_workflow_runs: totalNoOfWorkflows,
workflow_runs: modifiedWorkflows,
},
};
}, },
}); });
} }
@ -233,6 +240,7 @@ const WorkflowDetails: React.FC = () => {
data={ data={
JSON.parse(workflow.execution_data) as ExecutionData JSON.parse(workflow.execution_data) as ExecutionData
} }
resiliency_score={workflow.resiliency_score}
/> />
)} )}
</div> </div>
@ -245,6 +253,7 @@ const WorkflowDetails: React.FC = () => {
tab={2} tab={2}
cluster_name={workflow.cluster_name} cluster_name={workflow.cluster_name}
data={JSON.parse(workflow.execution_data) as ExecutionData} data={JSON.parse(workflow.execution_data) as ExecutionData}
resiliency_score={workflow.resiliency_score}
/> />
{/* Table for all Node details */} {/* Table for all Node details */}
<NodeTable <NodeTable

View File

@ -18,16 +18,16 @@ import React, { useState } from 'react';
import { DateRangePicker } from 'react-date-range'; import { DateRangePicker } from 'react-date-range';
import 'react-date-range/dist/styles.css'; // main css file import 'react-date-range/dist/styles.css'; // main css file
import 'react-date-range/dist/theme/default.css'; // theme css file import 'react-date-range/dist/theme/default.css'; // theme css file
import { Workflow, WorkflowRun } from '../../../models/graphql/workflowData'; import { Clusters } from '../../../models/graphql/clusterData';
import { WorkflowStatus } from '../../../models/graphql/workflowData';
import useStyles from './styles'; import useStyles from './styles';
interface HeaderSectionProps { interface HeaderSectionProps {
searchValue: string; searchValue?: string;
statusValue: string; statusValue?: WorkflowStatus;
clusterValue: string; clusterValue?: string;
isOpen: boolean; isOpen: boolean;
data: Workflow | undefined; clusterList?: Partial<Clusters>;
getClusters: (wfdata: WorkflowRun[]) => string[];
isDateOpen: boolean; isDateOpen: boolean;
popAnchorEl: HTMLElement | null; popAnchorEl: HTMLElement | null;
displayDate: string; displayDate: string;
@ -62,10 +62,9 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
statusValue, statusValue,
clusterValue, clusterValue,
isOpen, isOpen,
data,
popAnchorEl, popAnchorEl,
displayDate, displayDate,
getClusters, clusterList,
changeSearch, changeSearch,
changeStatus, changeStatus,
changeCluster, changeCluster,
@ -82,6 +81,7 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
key: 'selection', key: 'selection',
}, },
]); ]);
return ( return (
<div> <div>
<div className={classes.headerSection}> <div className={classes.headerSection}>
@ -127,13 +127,11 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
className={classes.selectText} className={classes.selectText}
> >
<MenuItem value="All">All</MenuItem> <MenuItem value="All">All</MenuItem>
{(data ? getClusters(data.getWorkFlowRuns) : []).map( {clusterList?.getCluster?.map((cluster) => (
(cluster: string) => ( <MenuItem key={cluster.cluster_name} value={cluster.cluster_name}>
<MenuItem key={cluster} value={cluster}> {cluster.cluster_name}
{cluster}
</MenuItem> </MenuItem>
) ))}
)}
</Select> </Select>
</FormControl> </FormControl>

View File

@ -1,3 +1,4 @@
import { useQuery } from '@apollo/client';
import { import {
Button, Button,
IconButton, IconButton,
@ -7,36 +8,31 @@ import {
TableCell, TableCell,
Typography, Typography,
} from '@material-ui/core'; } from '@material-ui/core';
import { useQuery } from '@apollo/client'; import ChevronRightIcon from '@material-ui/icons/ChevronRight';
import KeyboardArrowDownIcon from '@material-ui/icons/KeyboardArrowDown';
import MoreVertIcon from '@material-ui/icons/MoreVert'; import MoreVertIcon from '@material-ui/icons/MoreVert';
import React from 'react'; import React from 'react';
import KeyboardArrowDownIcon from '@material-ui/icons/KeyboardArrowDown';
import ChevronRightIcon from '@material-ui/icons/ChevronRight';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { import TimePopOver from '../../../components/TimePopOver';
ExecutionData,
WorkflowRun,
} from '../../../models/graphql/workflowData';
import { history } from '../../../redux/configureStore';
import { getProjectID, getProjectRole } from '../../../utils/getSearchParams';
import CustomStatus from '../CustomStatus/Status';
import useStyles from './styles';
import useActions from '../../../redux/actions';
import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
import { WORKFLOW_LIST_DETAILS } from '../../../graphql'; import { WORKFLOW_LIST_DETAILS } from '../../../graphql';
import { WorkflowRun } from '../../../models/graphql/workflowData';
import { import {
WorkflowList, WorkflowList,
WorkflowListDataVars, WorkflowListDataVars,
} from '../../../models/graphql/workflowListData'; } from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions';
import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
import { history } from '../../../redux/configureStore';
import { getProjectID, getProjectRole } from '../../../utils/getSearchParams';
import ExperimentPoints from '../BrowseSchedule/ExperimentPoints'; import ExperimentPoints from '../BrowseSchedule/ExperimentPoints';
import TimePopOver from '../../../components/TimePopOver'; import CustomStatus from '../CustomStatus/Status';
import useStyles from './styles';
interface TableDataProps { interface TableDataProps {
data: WorkflowRun; data: Partial<WorkflowRun>;
exeData: ExecutionData;
} }
const TableData: React.FC<TableDataProps> = ({ data, exeData }) => { const TableData: React.FC<TableDataProps> = ({ data }) => {
const classes = useStyles(); const classes = useStyles();
const projectID = getProjectID(); const projectID = getProjectID();
const projectRole = getProjectRole(); const projectRole = getProjectRole();
@ -96,9 +92,7 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
return ( return (
<> <>
<TableCell className={classes.tableDataStatus}> <TableCell className={classes.tableDataStatus}>
<CustomStatus <CustomStatus status={data.phase ?? ''} />
status={exeData.finishedAt.length === 0 ? 'Running' : exeData.phase}
/>
</TableCell> </TableCell>
<TableCell <TableCell
className={classes.workflowNameData} className={classes.workflowNameData}
@ -119,23 +113,24 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
</TableCell> </TableCell>
<TableCell> <TableCell>
<Typography className={classes.clusterName}> <Typography className={classes.clusterName}>
{nameCapitalized(data.cluster_name)} {nameCapitalized(data.cluster_name ?? '')}
</Typography> </Typography>
</TableCell> </TableCell>
<TableCell className={classes.reliabiltyData}> <TableCell className={classes.reliabiltyData}>
<Typography> <Typography>
<span>{t('chaosWorkflows.browseWorkflows.tableData.overallRR')}</span> <span>{t('chaosWorkflows.browseWorkflows.tableData.overallRR')}</span>
{!exeData.resiliency_score ? ( {data.resiliency_score === undefined ||
data.resiliency_score === null ? (
<span className={classes.less}> <span className={classes.less}>
{t('chaosWorkflows.browseWorkflows.tableData.na')} {t('chaosWorkflows.browseWorkflows.tableData.na')}
</span> </span>
) : ( ) : (
<span <span
className={`${classes.boldText} ${getResiliencyScoreColor( className={`${classes.boldText} ${getResiliencyScoreColor(
exeData.resiliency_score data.resiliency_score
)}`} )}`}
> >
{exeData.resiliency_score}% {data.resiliency_score}%
</span> </span>
)} )}
</Typography> </Typography>
@ -143,17 +138,22 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
<span> <span>
{t('chaosWorkflows.browseWorkflows.tableData.experimentsPassed')} {t('chaosWorkflows.browseWorkflows.tableData.experimentsPassed')}
</span> </span>
{!exeData.resiliency_score ? ( {data.experiments_passed === undefined ||
data.experiments_passed === null ||
data.total_experiments === undefined ||
data.total_experiments === null ||
data.resiliency_score === undefined ||
data.resiliency_score === null ? (
<span className={classes.less}> <span className={classes.less}>
{t('chaosWorkflows.browseWorkflows.tableData.na')} {t('chaosWorkflows.browseWorkflows.tableData.na')}
</span> </span>
) : ( ) : (
<span <span
className={`${classes.boldText} ${getResiliencyScoreColor( className={`${classes.boldText} ${getResiliencyScoreColor(
exeData.resiliency_score data.resiliency_score
)}`} )}`}
> >
{exeData.experiments_passed}/{exeData.total_experiments} {data.experiments_passed}/{data.total_experiments}
</span> </span>
)} )}
</Typography> </Typography>
@ -209,7 +209,7 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
</div> </div>
</TableCell> </TableCell>
<TableCell> <TableCell>
<TimePopOver unixTime={data.last_updated} /> <TimePopOver unixTime={data.last_updated ?? ''} />
</TableCell> </TableCell>
<TableCell> <TableCell>
<IconButton <IconButton

View File

@ -16,74 +16,134 @@ import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
import moment from 'moment'; import moment from 'moment';
import React, { useEffect, useState } from 'react'; import React, { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { WORKFLOW_DETAILS, WORKFLOW_EVENTS } from '../../../graphql';
import { import {
ExecutionData, GET_CLUSTER_NAMES,
WORKFLOW_DETAILS,
WORKFLOW_EVENTS,
} from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import {
Pagination,
SortInput,
Workflow, Workflow,
WorkflowDataVars, WorkflowDataVars,
WorkflowRun, WorkflowRun,
WorkflowRunFilterInput,
WorkflowStatus,
WorkflowSubscription, WorkflowSubscription,
WorkflowSubscriptionInput,
} from '../../../models/graphql/workflowData'; } from '../../../models/graphql/workflowData';
import { getProjectID } from '../../../utils/getSearchParams'; import { getProjectID } from '../../../utils/getSearchParams';
import {
sortAlphaAsc,
sortAlphaDesc,
sortNumAsc,
sortNumDesc,
} from '../../../utils/sort';
import HeaderSection from './HeaderSection'; import HeaderSection from './HeaderSection';
import useStyles from './styles'; import useStyles from './styles';
import TableData from './TableData'; import TableData from './TableData';
interface FilterOptions {
search: string;
status: string;
cluster: string;
}
interface PaginationData {
pageNo: number;
rowsPerPage: number;
}
interface SortData {
lastRun: { sort: boolean; ascending: boolean };
name: { sort: boolean; ascending: boolean };
noOfSteps: { sort: boolean; ascending: boolean };
}
interface DateData {
dateValue: string;
fromDate: string;
toDate: string;
}
const BrowseWorkflow: React.FC = () => { const BrowseWorkflow: React.FC = () => {
const classes = useStyles(); const classes = useStyles();
const projectID = getProjectID(); const projectID = getProjectID();
const { t } = useTranslation(); const { t } = useTranslation();
// State for pagination
const [paginationData, setPaginationData] = useState<Pagination>({
page: 0,
limit: 10,
});
// States for filters
const [filters, setFilters] = useState<WorkflowRunFilterInput>({
workflow_name: '',
cluster_name: 'All',
workflow_status: 'All',
date_range: {
start_date: new Date(0).valueOf().toString(),
},
});
// State for date to be displayed
const [displayDate, setDisplayDate] = React.useState<string>(
t('chaosWorkflows.browseWorkflows.dateFilterHelperText')
);
// State for sorting
const [sortData, setSortData] = useState<SortInput>({
field: 'Time',
descending: true,
});
// Checks if the workflow event from subscription exists in the table
function isFiltered(newWorkflow: WorkflowRun) {
const nameExists =
filters.workflow_name &&
newWorkflow.workflow_name
.toLowerCase()
.includes(filters.workflow_name.toLowerCase());
const clusterExists =
filters.cluster_name === 'All' ||
filters.cluster_name === newWorkflow.cluster_name;
const phaseExists =
filters.workflow_status === 'All' ||
filters.workflow_status === newWorkflow.phase;
const dateExists =
filters.date_range &&
newWorkflow.last_updated >= filters.date_range.start_date &&
(filters.date_range.end_date
? newWorkflow.last_updated < filters.date_range.end_date
: true);
const shouldAddNewWorkflow =
nameExists && clusterExists && phaseExists && dateExists;
return shouldAddNewWorkflow;
}
// Query to get list of Clusters
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
GET_CLUSTER_NAMES,
{
variables: {
project_id: projectID,
},
}
);
// Query to get workflows // Query to get workflows
const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>( const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>(
WORKFLOW_DETAILS, WORKFLOW_DETAILS,
{ {
variables: { projectID }, variables: {
workflowRunsInput: {
project_id: projectID,
pagination: {
page: paginationData.page,
limit: paginationData.limit,
},
sort: sortData,
filter: filters,
},
},
fetchPolicy: 'cache-and-network', fetchPolicy: 'cache-and-network',
} }
); );
// Using subscription to get realtime data // Using subscription to get realtime data
useEffect(() => { useEffect(() => {
subscribeToMore<WorkflowSubscription>({ subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
document: WORKFLOW_EVENTS, document: WORKFLOW_EVENTS,
variables: { projectID }, variables: { projectID },
updateQuery: (prev, { subscriptionData }) => { updateQuery: (prev, { subscriptionData }) => {
if (!subscriptionData.data) return prev; if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
const modifiedWorkflows = prev.getWorkFlowRuns.slice(); return prev;
const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
const newWorkflow = subscriptionData.data.workflowEventListener; const newWorkflow = subscriptionData.data.workflowEventListener;
// Updating the query data // Updating the query data
let i = 0; let i = 0;
let totalNoOfWorkflows = prev.getWorkflowRuns.total_no_of_workflow_runs;
for (; i < modifiedWorkflows.length; i++) { for (; i < modifiedWorkflows.length; i++) {
if ( if (
modifiedWorkflows[i].workflow_run_id === newWorkflow.workflow_run_id modifiedWorkflows[i].workflow_run_id === newWorkflow.workflow_run_id
@ -92,34 +152,21 @@ const BrowseWorkflow: React.FC = () => {
break; break;
} }
} }
if (i === modifiedWorkflows.length) if (i === modifiedWorkflows.length && isFiltered(newWorkflow)) {
totalNoOfWorkflows++;
modifiedWorkflows.unshift(newWorkflow); modifiedWorkflows.unshift(newWorkflow);
}
return { ...prev, getWorkFlowRuns: modifiedWorkflows }; return {
getWorkflowRuns: {
total_no_of_workflow_runs: totalNoOfWorkflows,
workflow_runs: modifiedWorkflows,
},
};
}, },
}); });
}, [data]); }, [data]);
// States for filters
const [filters, setFilters] = useState<FilterOptions>({
search: '',
status: 'All',
cluster: 'All',
});
// State for sorting
const [sortData, setSortData] = useState<SortData>({
lastRun: { sort: true, ascending: true },
name: { sort: false, ascending: true },
noOfSteps: { sort: false, ascending: false },
});
// State for pagination
const [paginationData, setPaginationData] = useState<PaginationData>({
pageNo: 0,
rowsPerPage: 5,
});
const [popAnchorEl, setPopAnchorEl] = React.useState<null | HTMLElement>( const [popAnchorEl, setPopAnchorEl] = React.useState<null | HTMLElement>(
null null
); );
@ -136,96 +183,14 @@ const BrowseWorkflow: React.FC = () => {
setOpen(true); setOpen(true);
}; };
// State for start date and end date const workflowRuns = data?.getWorkflowRuns.workflow_runs;
const [dateRange, setDateRange] = React.useState<DateData>({
dateValue: 'Select a period',
fromDate: new Date(0).toString(),
toDate: new Date(new Date().setHours(23, 59, 59)).toString(),
});
const getClusters = (searchingData: WorkflowRun[]) => { // Functions passed as props in the headerSection
const uniqueList: string[] = [];
searchingData.forEach((data) => {
if (!uniqueList.includes(data.cluster_name)) {
uniqueList.push(data.cluster_name);
}
});
return uniqueList;
};
const filteredData = data?.getWorkFlowRuns
.filter((dataRow) =>
dataRow.workflow_name.toLowerCase().includes(filters.search.toLowerCase())
)
.filter((dataRow) =>
filters.status === 'All'
? true
: (JSON.parse(dataRow.execution_data) as ExecutionData).phase.includes(
filters.status
)
)
.filter((dataRow) =>
filters.cluster === 'All'
? true
: dataRow.cluster_name
.toLowerCase()
.includes(filters.cluster.toLowerCase())
)
.filter((dataRow) => {
return dateRange.fromDate && dateRange.toDate === undefined
? true
: parseInt(dataRow.last_updated, 10) * 1000 >=
new Date(moment(dateRange.fromDate).format()).getTime() &&
parseInt(dataRow.last_updated, 10) * 1000 <=
new Date(moment(dateRange.toDate).format()).getTime();
})
.sort((a: WorkflowRun, b: WorkflowRun) => {
// Sorting based on unique fields
if (sortData.name.sort) {
const x = a.workflow_name;
const y = b.workflow_name;
return sortData.name.ascending
? sortAlphaAsc(x, y)
: sortAlphaDesc(x, y);
}
if (sortData.lastRun.sort) {
const x = parseInt(a.last_updated, 10);
const y = parseInt(b.last_updated, 10);
return sortData.lastRun.ascending
? sortNumAsc(y, x)
: sortNumDesc(y, x);
}
return 0;
})
.sort((a: WorkflowRun, b: WorkflowRun) => {
// Sorting based on non-unique fields
if (sortData.noOfSteps.sort) {
const x = Object.keys(
(JSON.parse(a.execution_data) as ExecutionData).nodes
).length;
const y = Object.keys(
(JSON.parse(b.execution_data) as ExecutionData).nodes
).length;
return sortData.noOfSteps.ascending
? sortNumAsc(x, y)
: sortNumDesc(x, y);
}
return 0;
});
// Functions passed as props in the headerSeaction
const changeSearch = ( const changeSearch = (
event: React.ChangeEvent<HTMLTextAreaElement | HTMLInputElement> event: React.ChangeEvent<HTMLTextAreaElement | HTMLInputElement>
) => { ) => {
setFilters({ ...filters, search: event.target.value as string }); setFilters({ ...filters, workflow_name: event.target.value as string });
setPaginationData({ ...paginationData, pageNo: 0 }); setPaginationData({ ...paginationData, page: 0 });
}; };
const changeStatus = ( const changeStatus = (
@ -234,8 +199,11 @@ const BrowseWorkflow: React.FC = () => {
value: unknown; value: unknown;
}> }>
) => { ) => {
setFilters({ ...filters, status: event.target.value as string }); setFilters({
setPaginationData({ ...paginationData, pageNo: 0 }); ...filters,
workflow_status: event.target.value as WorkflowStatus,
});
setPaginationData({ ...paginationData, page: 0 });
}; };
const changeCluster = ( const changeCluster = (
@ -244,33 +212,34 @@ const BrowseWorkflow: React.FC = () => {
value: unknown; value: unknown;
}> }>
) => { ) => {
setFilters({ ...filters, cluster: event.target.value as string }); setFilters({ ...filters, cluster_name: event.target.value as string });
setPaginationData({ ...paginationData, pageNo: 0 }); setPaginationData({ ...paginationData, page: 0 });
}; };
// Function to set the date range for filtering // Function to set the date range for filtering
const dateChange = (selectFromDate: string, selectToDate: string) => { const dateChange = (selectStartDate: string, selectEndDate: string) => {
setDateRange({ // Change filter value for date range
dateValue: `${moment(selectFromDate) setFilters({
.format('DD.MM.YYYY') ...filters,
.toString()}-${moment(selectToDate).format('DD.MM.YYYY').toString()}`, date_range: {
fromDate: new Date(new Date(selectFromDate).setHours(0, 0, 0)).toString(), start_date: new Date(selectStartDate)
toDate: new Date(new Date(selectToDate).setHours(23, 59, 59)).toString(), .setHours(0, 0, 0)
.valueOf()
.toString(),
end_date: new Date(selectEndDate)
.setHours(23, 59, 59)
.valueOf()
.toString(),
},
}); });
};
// Function to validate execution_data JSON // Change the display value of date range
const dataPerRow = (dataRow: WorkflowRun) => { setDisplayDate(
let exe_data; `${moment(selectStartDate).format('DD.MM.YYYY').toString()}-${moment(
try { selectEndDate
exe_data = JSON.parse(dataRow.execution_data); )
} catch (error) { .format('DD.MM.YYYY')
console.error(error); .toString()}`
return <></>;
}
return (
<TableRow data-cy="WorkflowRunsTableRow" key={dataRow.workflow_run_id}>
<TableData data={dataRow} exeData={exe_data} />
</TableRow>
); );
}; };
@ -279,20 +248,19 @@ const BrowseWorkflow: React.FC = () => {
<section className="Heading section"> <section className="Heading section">
{/* Header Section */} {/* Header Section */}
<HeaderSection <HeaderSection
searchValue={filters.search} searchValue={filters.workflow_name}
changeSearch={changeSearch} changeSearch={changeSearch}
statusValue={filters.status} statusValue={filters.workflow_status}
changeStatus={changeStatus} changeStatus={changeStatus}
clusterValue={filters.cluster} clusterValue={filters.cluster_name}
changeCluster={changeCluster} changeCluster={changeCluster}
popOverClick={handlePopOverClick} popOverClick={handlePopOverClick}
popOverClose={handlePopOverClose} popOverClose={handlePopOverClose}
isOpen={isOpen} isOpen={isOpen}
data={data} clusterList={clusterList}
getClusters={getClusters}
popAnchorEl={popAnchorEl} popAnchorEl={popAnchorEl}
isDateOpen={open} isDateOpen={open}
displayDate={dateRange.dateValue} displayDate={displayDate}
selectDate={dateChange} selectDate={dateChange}
/> />
</section> </section>
@ -321,9 +289,7 @@ const BrowseWorkflow: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Name',
name: { sort: true, ascending: true },
lastRun: { sort: false, ascending: true },
}) })
} }
> >
@ -334,9 +300,8 @@ const BrowseWorkflow: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Name',
name: { sort: true, ascending: false }, descending: true,
lastRun: { sort: false, ascending: false },
}) })
} }
> >
@ -379,9 +344,8 @@ const BrowseWorkflow: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Time',
lastRun: { sort: true, ascending: true }, descending: true,
name: { sort: false, ascending: true },
}) })
} }
> >
@ -392,9 +356,7 @@ const BrowseWorkflow: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Time',
lastRun: { sort: true, ascending: false },
name: { sort: false, ascending: true },
}) })
} }
> >
@ -419,14 +381,15 @@ const BrowseWorkflow: React.FC = () => {
</Typography> </Typography>
</TableCell> </TableCell>
</TableRow> </TableRow>
) : filteredData && filteredData.length ? ( ) : workflowRuns && workflowRuns.length ? (
filteredData workflowRuns.map((dataRow) => (
.slice( <TableRow
paginationData.pageNo * paginationData.rowsPerPage, data-cy="WorkflowRunsTableRow"
paginationData.pageNo * paginationData.rowsPerPage + key={dataRow.workflow_run_id}
paginationData.rowsPerPage >
) <TableData data={dataRow} />
.map((dataRow) => dataPerRow(dataRow)) </TableRow>
))
) : ( ) : (
<TableRow> <TableRow>
<TableCell colSpan={7}> <TableCell colSpan={7}>
@ -442,19 +405,19 @@ const BrowseWorkflow: React.FC = () => {
{/* Pagination */} {/* Pagination */}
<TablePagination <TablePagination
rowsPerPageOptions={[5, 10, 25]} rowsPerPageOptions={[10, 25, 50]}
component="div" component="div"
count={filteredData?.length ?? 0} count={data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0}
rowsPerPage={paginationData.rowsPerPage} rowsPerPage={paginationData.limit}
page={paginationData.pageNo} page={paginationData.page}
onChangePage={(_, page) => onChangePage={(_, page) =>
setPaginationData({ ...paginationData, pageNo: page }) setPaginationData({ ...paginationData, page })
} }
onChangeRowsPerPage={(event) => onChangeRowsPerPage={(event) =>
setPaginationData({ setPaginationData({
...paginationData, ...paginationData,
pageNo: 0, page: 0,
rowsPerPage: parseInt(event.target.value, 10), limit: parseInt(event.target.value, 10),
}) })
} }
/> />

View File

@ -19,7 +19,7 @@ import {
import useStyles from './styles'; import useStyles from './styles';
interface WorkflowRunCardProps { interface WorkflowRunCardProps {
data: WorkflowRun; data: Partial<WorkflowRun>;
} }
const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => { const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
@ -31,7 +31,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
const nodeSelection = useActions(NodeSelectionActions); const nodeSelection = useActions(NodeSelectionActions);
function getPhaseVariant(variant: string): string { function getPhaseVariant(variant: string | undefined): string {
switch (variant) { switch (variant) {
case SUCCEEDED: case SUCCEEDED:
return classes.succeeded; return classes.succeeded;
@ -56,8 +56,6 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
return classes.highScore; return classes.highScore;
} }
const executionData = JSON.parse(data.execution_data);
return ( return (
<Link <Link
underline="none" underline="none"
@ -80,7 +78,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
<div> <div>
<div className={classes.statusDiv}> <div className={classes.statusDiv}>
<svg viewBox="0 0 10 10"> <svg viewBox="0 0 10 10">
<circle className={getPhaseVariant(executionData.phase)} /> <circle className={getPhaseVariant(data.phase)} />
</svg> </svg>
<div> <div>
<Typography <Typography
@ -102,13 +100,12 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
)} )}
</Typography> </Typography>
<Typography <Typography
className={getResiliencyScoreVariant( className={getResiliencyScoreVariant(data.resiliency_score ?? 0)}
executionData.resiliency_score
)}
> >
{executionData.resiliency_score {data.resiliency_score === undefined ||
? `${executionData.resiliency_score}%` data.resiliency_score === null
: '--'} ? 'NA'
: `${data.resiliency_score}%`}
</Typography> </Typography>
</div> </div>

View File

@ -3,7 +3,7 @@ import { ButtonFilled } from 'litmus-ui';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { Link } from 'react-router-dom'; import { Link } from 'react-router-dom';
import { Workflow } from '../../../../models/graphql/workflowData'; import { WorkflowRun } from '../../../../models/graphql/workflowData';
import { history } from '../../../../redux/configureStore'; import { history } from '../../../../redux/configureStore';
import { import {
getProjectID, getProjectID,
@ -13,7 +13,7 @@ import useStyles from './styles';
import { WorkflowRunCard } from './WorkflowRunCard'; import { WorkflowRunCard } from './WorkflowRunCard';
interface RecentWorkflowRunsProps { interface RecentWorkflowRunsProps {
data: Workflow; data: Partial<WorkflowRun>[];
} }
const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => { const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {
@ -23,8 +23,6 @@ const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {
const projectID = getProjectID(); const projectID = getProjectID();
const projectRole = getProjectRole(); const projectRole = getProjectRole();
const filteredData = data.getWorkFlowRuns.slice(-3).reverse();
return ( return (
<Paper className={classes.workflowRunContainer}> <Paper className={classes.workflowRunContainer}>
{/* Heading section of the container */} {/* Heading section of the container */}
@ -59,7 +57,7 @@ const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {
{/* WorkflowRuns Data */} {/* WorkflowRuns Data */}
{filteredData.map((workflow) => { {data.map((workflow) => {
return <WorkflowRunCard key={workflow.workflow_id} data={workflow} />; return <WorkflowRunCard key={workflow.workflow_id} data={workflow} />;
})} })}
</Paper> </Paper>

View File

@ -35,12 +35,20 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
const { data, loading, error } = useQuery<Workflow, WorkflowDataVars>( const { data, loading, error } = useQuery<Workflow, WorkflowDataVars>(
WORKFLOW_DETAILS, WORKFLOW_DETAILS,
{ {
variables: { projectID }, variables: {
workflowRunsInput: {
project_id: projectID,
pagination: {
page: 0,
limit: 3,
},
},
},
fetchPolicy: 'cache-and-network', fetchPolicy: 'cache-and-network',
} }
); );
let workflowRunCount = 0; const workflowRunCount = data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0;
if (error) { if (error) {
console.error('Error fetching Workflow Data'); console.error('Error fetching Workflow Data');
@ -51,24 +59,14 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
); );
} }
if (data) {
workflowRunCount = data.getWorkFlowRuns.length;
} else {
return (
<Center>
<Loader />
</Center>
);
}
return ( return (
<div> <div>
{loading ? ( {loading ? (
<Center> <Center>
<Loader /> <Loader />
</Center> </Center>
) : workflowRunCount > 0 ? ( ) : data && workflowRunCount > 0 ? (
<RecentWorkflowRuns data={data} /> <RecentWorkflowRuns data={data.getWorkflowRuns.workflow_runs} />
) : ( ) : (
<MainInfoContainer <MainInfoContainer
src="./icons/workflowScheduleHome.svg" src="./icons/workflowScheduleHome.svg"

View File

@ -6,13 +6,17 @@ import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux'; import { useSelector } from 'react-redux';
import YAML from 'yaml'; import YAML from 'yaml';
import { StyledTab, TabPanel } from '../../../components/Tabs'; import { StyledTab, TabPanel } from '../../../components/Tabs';
import { WORKFLOW_DETAILS, WORKFLOW_LOGS } from '../../../graphql'; import {
WORKFLOW_DETAILS_WITH_EXEC_DATA,
WORKFLOW_LOGS,
} from '../../../graphql';
import { import {
PodLog, PodLog,
PodLogRequest, PodLogRequest,
PodLogVars, PodLogVars,
} from '../../../models/graphql/podLog'; } from '../../../models/graphql/podLog';
import { import {
ExecutionData,
Workflow, Workflow,
WorkflowDataVars, WorkflowDataVars,
} from '../../../models/graphql/workflowData'; } from '../../../models/graphql/workflowData';
@ -47,13 +51,18 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
const projectID = getProjectID(); const projectID = getProjectID();
const { data: workflow_data } = useQuery<Workflow, WorkflowDataVars>( const { data: workflow_data } = useQuery<Workflow, WorkflowDataVars>(
WORKFLOW_DETAILS, WORKFLOW_DETAILS_WITH_EXEC_DATA,
{ variables: { projectID } } {
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_run_ids: [workflow_run_id],
},
},
}
); );
const workflow = workflow_data?.getWorkFlowRuns.filter( const workflow = workflow_data?.getWorkflowRuns.workflow_runs[0];
(w) => w.workflow_run_id === workflow_run_id
)[0];
const [chaosData, setChaosData] = useState<ChaosDataVar>({ const [chaosData, setChaosData] = useState<ChaosDataVar>({
exp_pod: '', exp_pod: '',
@ -63,7 +72,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
useEffect(() => { useEffect(() => {
if (workflow !== undefined) { if (workflow !== undefined) {
const nodeData = JSON.parse(workflow.execution_data).nodes[pod_name]; const nodeData = (JSON.parse(workflow.execution_data) as ExecutionData)
.nodes[pod_name];
if (nodeData && nodeData.chaosData) if (nodeData && nodeData.chaosData)
setChaosData({ setChaosData({
exp_pod: nodeData.chaosData.experimentPod, exp_pod: nodeData.chaosData.experimentPod,
@ -83,7 +93,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
useEffect(() => { useEffect(() => {
if (workflow !== undefined) { if (workflow !== undefined) {
const nodeData = JSON.parse(workflow.execution_data).nodes[pod_name]; const nodeData = (JSON.parse(workflow.execution_data) as ExecutionData)
.nodes[pod_name];
if (nodeData?.chaosData?.chaosResult) { if (nodeData?.chaosData?.chaosResult) {
setChaosResult(YAML.stringify(nodeData.chaosData?.chaosResult)); setChaosResult(YAML.stringify(nodeData.chaosData?.chaosResult));
} else { } else {
@ -118,8 +129,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
} }
if ( if (
workflow !== undefined && workflow !== undefined &&
JSON.parse(workflow?.execution_data).nodes[pod_name].type === (JSON.parse(workflow.execution_data) as ExecutionData).nodes[pod_name]
'ChaosEngine' .type === 'ChaosEngine'
) { ) {
return t('workflowDetailsView.nodeLogs.chaosLogs'); return t('workflowDetailsView.nodeLogs.chaosLogs');
} }

View File

@ -1,15 +1,16 @@
import { Typography } from '@material-ui/core'; import { Typography } from '@material-ui/core';
import { ButtonOutlined } from 'litmus-ui';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { ButtonOutlined } from 'litmus-ui'; import TimePopOver from '../../../components/TimePopOver';
import { ExecutionData } from '../../../models/graphql/workflowData'; import { ExecutionData } from '../../../models/graphql/workflowData';
import useStyles from './styles'; import useStyles from './styles';
import TimePopOver from '../../../components/TimePopOver';
interface WorkflowInfoProps { interface WorkflowInfoProps {
setIsInfoToggled?: React.Dispatch<React.SetStateAction<boolean>>; setIsInfoToggled?: React.Dispatch<React.SetStateAction<boolean>>;
tab: number; tab: number;
data: ExecutionData; data: ExecutionData;
resiliency_score?: number;
cluster_name: string; cluster_name: string;
} }
@ -17,6 +18,7 @@ const WorkflowInfo: React.FC<WorkflowInfoProps> = ({
setIsInfoToggled, setIsInfoToggled,
tab, tab,
data, data,
resiliency_score,
cluster_name, cluster_name,
}) => { }) => {
const classes = useStyles(); const classes = useStyles();
@ -55,10 +57,10 @@ const WorkflowInfo: React.FC<WorkflowInfoProps> = ({
{t('workflowDetailsView.workflowInfo.resilienceScore')} {t('workflowDetailsView.workflowInfo.resilienceScore')}
</Typography> </Typography>
{/* Static data, will be changed with API response */} {/* Static data, will be changed with API response */}
<Typography className={classes.resilliencyScore}> <Typography className={classes.resiliencyScore}>
{data.resiliency_score === undefined {resiliency_score === undefined || resiliency_score === null
? 'NA' ? 'NA'
: `${data.resiliency_score}%`} : `${resiliency_score}%`}
</Typography> </Typography>
</div> </div>

View File

@ -55,7 +55,7 @@ const useStyles = makeStyles((theme) => ({
fontSize: '1rem', fontSize: '1rem',
}, },
resilliencyScore: { resiliencyScore: {
color: theme.palette.highlight, color: theme.palette.highlight,
fontSize: '1.5rem', fontSize: '1.5rem',
}, },

View File

@ -76,7 +76,6 @@ github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4Rq
github.com/agnivade/levenshtein v1.0.3 h1:M5ZnqLOoZR8ygVq0FfkXsNOKzMCk0xRiow0R5+5VkQ0= github.com/agnivade/levenshtein v1.0.3 h1:M5ZnqLOoZR8ygVq0FfkXsNOKzMCk0xRiow0R5+5VkQ0=
github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@ -84,16 +83,13 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/aliyun/aliyun-oss-go-sdk v2.0.6+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= github.com/aliyun/aliyun-oss-go-sdk v2.0.6+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/ant31/crd-validation v0.0.0-20180702145049-30f8a35d0ac2/go.mod h1:X0noFIik9YqfhGYBLEHg8LJKEwy7QIitLQuFMpKLcPk= github.com/ant31/crd-validation v0.0.0-20180702145049-30f8a35d0ac2/go.mod h1:X0noFIik9YqfhGYBLEHg8LJKEwy7QIitLQuFMpKLcPk=
github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q=
github.com/antonmedv/expr v1.8.2/go.mod h1:5qsM3oLGDND7sDmQGDXHkYfkjYMUX14qsgqmHhwGEk8= github.com/antonmedv/expr v1.8.2/go.mod h1:5qsM3oLGDND7sDmQGDXHkYfkjYMUX14qsgqmHhwGEk8=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3 h1:UbCWw+VjeyicEGnFvBIGzOYCKuCqrRUzlxSbzaHcXug= github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3 h1:UbCWw+VjeyicEGnFvBIGzOYCKuCqrRUzlxSbzaHcXug=
github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3/go.mod h1:M0Up9o5uqIZvRh/vh8eJR27s6H+UlkiS1PBUQAIq4Hw= github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3/go.mod h1:M0Up9o5uqIZvRh/vh8eJR27s6H+UlkiS1PBUQAIq4Hw=
@ -102,7 +98,6 @@ github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hC
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
@ -119,7 +114,6 @@ github.com/bazelbuild/bazel-gazelle v0.0.0-20181012220611-c728ce9f663e/go.mod h1
github.com/bazelbuild/buildtools v0.0.0-20180226164855-80c7f0d45d7e/go.mod h1:5JP0TXzWDHXv8qvxRC4InIazwdyDseBDbzESUMKk1yU= github.com/bazelbuild/buildtools v0.0.0-20180226164855-80c7f0d45d7e/go.mod h1:5JP0TXzWDHXv8qvxRC4InIazwdyDseBDbzESUMKk1yU=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bifurcation/mint v0.0.0-20180715133206-93c51c6ce115/go.mod h1:zVt7zX3K/aDCk9Tj+VM7YymsX66ERvzCJzw8rFCX2JU= github.com/bifurcation/mint v0.0.0-20180715133206-93c51c6ce115/go.mod h1:zVt7zX3K/aDCk9Tj+VM7YymsX66ERvzCJzw8rFCX2JU=
@ -140,10 +134,8 @@ github.com/cenkalti/backoff v2.1.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/prettybench v0.0.0-20150116022406-03b8cfe5406c/go.mod h1:Xe6ZsFhtM8HrDku0pxJ3/Lr51rwykrzgFwpmTzleatY= github.com/cespare/prettybench v0.0.0-20150116022406-03b8cfe5406c/go.mod h1:Xe6ZsFhtM8HrDku0pxJ3/Lr51rwykrzgFwpmTzleatY=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM= github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw= github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw=
github.com/checkpoint-restore/go-criu v0.0.0-20190109184317-bdb7599cd87b/go.mod h1:TrMrLQfeENAPYPRsJuq3jsqdlRh3lvi6trTZJG8+tho= github.com/checkpoint-restore/go-criu v0.0.0-20190109184317-bdb7599cd87b/go.mod h1:TrMrLQfeENAPYPRsJuq3jsqdlRh3lvi6trTZJG8+tho=
@ -217,7 +209,6 @@ github.com/denisenkom/go-mssqldb v0.0.0-20190515213511-eb9f6a1743f3/go.mod h1:zA
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c h1:TUuUh0Xgj97tLMNtWtNvI9mIV6isjEb9lBMNv+77IGM=
github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc= github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
@ -259,7 +250,6 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/euank/go-kmsg-parser v2.0.0+incompatible/go.mod h1:MhmAMZ8V4CYH4ybgdRwPr2TU5ThnS43puaKEMpja1uw= github.com/euank/go-kmsg-parser v2.0.0+incompatible/go.mod h1:MhmAMZ8V4CYH4ybgdRwPr2TU5ThnS43puaKEMpja1uw=
github.com/evanphx/json-patch v4.1.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.1.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.5.0+incompatible h1:ouOWdg56aJriqS0huScTkVXPC5IcNrDCXZ6OoTAWu7M=
github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4=
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
@ -269,13 +259,11 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fatih/structtag v1.1.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/fatih/structtag v1.1.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk= github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk=
github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
@ -287,7 +275,6 @@ github.com/ghodss/yaml v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2H
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 h1:Mn26/9ZMNWSw9C9ERFA1PUxfmGpolnw2v0bKOREu5ew= github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 h1:Mn26/9ZMNWSw9C9ERFA1PUxfmGpolnw2v0bKOREu5ew=
github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32/go.mod h1:GIjDIg/heH5DOkXY3YJ/wNhfHsQHoXGjl8G8amsYQ1I= github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32/go.mod h1:GIjDIg/heH5DOkXY3YJ/wNhfHsQHoXGjl8G8amsYQ1I=
github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
@ -298,7 +285,6 @@ github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM= github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM=
github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=
github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2JgI= github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2JgI=
github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs= github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs=
@ -312,7 +298,6 @@ github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgO
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-logr/logr v0.1.0 h1:M1Tv3VzNlEHg6uyACnRdtrploV2P7wZqH8BoQMtz0cg=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-logr/zapr v0.1.0/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk= github.com/go-logr/zapr v0.1.0/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk=
github.com/go-logr/zapr v0.1.1/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk= github.com/go-logr/zapr v0.1.1/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk=
@ -419,7 +404,6 @@ github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4er
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191027212112-611e8accdfc9/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191027212112-611e8accdfc9/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@ -439,7 +423,6 @@ github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0 h1:oOuy+ugB+P/kBdUnG5QaMXSIyJ1q38wWSojYCb3z5VQ=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM=
@ -458,9 +441,7 @@ github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
@ -552,7 +533,6 @@ github.com/heketi/rest v0.0.0-20180404230133-aa6a65207413/go.mod h1:BeS3M108VzVl
github.com/heketi/tests v0.0.0-20151005000721-f3775cbcefd6/go.mod h1:xGMAM8JLi7UkZt1i4FQeQy0R2T8GLUwQhOP5M1gBhy4= github.com/heketi/tests v0.0.0-20151005000721-f3775cbcefd6/go.mod h1:xGMAM8JLi7UkZt1i4FQeQy0R2T8GLUwQhOP5M1gBhy4=
github.com/heketi/utils v0.0.0-20170317161834-435bc5bdfa64/go.mod h1:RYlF4ghFZPPmk2TC5REt5OFwvfb6lzxFWrTWB+qs28s= github.com/heketi/utils v0.0.0-20170317161834-435bc5bdfa64/go.mod h1:RYlF4ghFZPPmk2TC5REt5OFwvfb6lzxFWrTWB+qs28s=
github.com/helm/helm-2to3 v0.2.0/go.mod h1:jQUVAWB0bM7zNIqKPIfHFzuFSK0kHYovJrjO+hqcvRk= github.com/helm/helm-2to3 v0.2.0/go.mod h1:jQUVAWB0bM7zNIqKPIfHFzuFSK0kHYovJrjO+hqcvRk=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4=
github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=
@ -585,14 +565,12 @@ github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhB
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8= github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/jpillora/go-ogle-analytics v0.0.0-20161213085824-14b04e0594ef/go.mod h1:PlwhC7q1VSK73InDzdDatVetQrTsQHIbOvcJAZzitY0= github.com/jpillora/go-ogle-analytics v0.0.0-20161213085824-14b04e0594ef/go.mod h1:PlwhC7q1VSK73InDzdDatVetQrTsQHIbOvcJAZzitY0=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@ -621,9 +599,7 @@ github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0
github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@ -632,7 +608,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kshvakov/clickhouse v1.3.5/go.mod h1:DMzX7FxRymoNkVgizH0DWAL8Cur7wHLgx3MUnGwJqpE= github.com/kshvakov/clickhouse v1.3.5/go.mod h1:DMzX7FxRymoNkVgizH0DWAL8Cur7wHLgx3MUnGwJqpE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
@ -647,6 +622,8 @@ github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z
github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa h1:lBEvg10ZPndmBUMtVaMRVCqeKnKYmjtRSg2SF4iTQ7o= github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa h1:lBEvg10ZPndmBUMtVaMRVCqeKnKYmjtRSg2SF4iTQ7o=
github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa/go.mod h1:Z2GpYjqXwFd8bx+kv58YEQFxynx1v9PMGCGTQFRVnFQ= github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa/go.mod h1:Z2GpYjqXwFd8bx+kv58YEQFxynx1v9PMGCGTQFRVnFQ=
github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ= github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ=
github.com/litmuschaos/litmus v0.0.0-20210602074504-1b424623457f h1:CLezzbI5dn/WOXj/cDt9SbKA0cAko7M2tHbWzuvytpo=
github.com/litmuschaos/litmus v0.0.0-20210603121521-c77e5d24fea8 h1:HzWuTLL5/LsNlKt7SicBvLxf5Fk5E82SsGjUDqAw4qc=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA= github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA=
github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04= github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04=
@ -689,7 +666,6 @@ github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
github.com/mattn/go-shellwords v1.0.5/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= github.com/mattn/go-shellwords v1.0.5/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.1/go.mod h1:F9YacGpnZbLQMzuPI0rR6op21YvNu/RjL705LJJpM3k= github.com/maxbrunsfeld/counterfeiter/v6 v6.2.1/go.mod h1:F9YacGpnZbLQMzuPI0rR6op21YvNu/RjL705LJJpM3k=
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY= github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY=
@ -735,7 +711,6 @@ github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8m
github.com/munnerz/goautoneg v0.0.0-20190414153302-2ae31c8b6b30/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20190414153302-2ae31c8b6b30/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mvdan/xurls v1.1.0/go.mod h1:tQlNn3BED8bE/15hnSL2HLkDeLWpNPAwtw7wkEq44oU= github.com/mvdan/xurls v1.1.0/go.mod h1:tQlNn3BED8bE/15hnSL2HLkDeLWpNPAwtw7wkEq44oU=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA= github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA=
@ -748,7 +723,6 @@ github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzE
github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
@ -759,13 +733,11 @@ github.com/onsi/ginkgo v1.4.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.1 h1:q/mM8GF/n0shIN8SaAZ0V+jnLPzen6WIVZdiwrRlMlo=
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.3.0/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.3.0/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.0 h1:XPnZz8VVBHjVsy1vzJmRwIcSwiUO+JFfrv/xGiigmME=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
@ -818,7 +790,6 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
@ -840,7 +811,6 @@ github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
@ -863,7 +833,6 @@ github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDa
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/prometheus v2.3.2+incompatible/go.mod h1:oAIUtOny2rjMX0OWN5vPR5/q/twIROJvdqnQKDdil/s= github.com/prometheus/prometheus v2.3.2+incompatible/go.mod h1:oAIUtOny2rjMX0OWN5vPR5/q/twIROJvdqnQKDdil/s=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
@ -949,7 +918,6 @@ github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/syndtr/gocapability v0.0.0-20160928074757-e7cb7fa329f4/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20160928074757-e7cb7fa329f4/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/thecodeteam/goscaleio v0.1.0/go.mod h1:68sdkZAsK8bvEwBlbQnlLS+xU+hvLYM/iQ8KXej1AwM= github.com/thecodeteam/goscaleio v0.1.0/go.mod h1:68sdkZAsK8bvEwBlbQnlLS+xU+hvLYM/iQ8KXej1AwM=
@ -1323,7 +1291,6 @@ google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190128161407-8ac453e89fca/go.mod h1:L3J43x8/uS+qIUoksaLKe6OS3nUKxOKuIFz1sl2/jx4= google.golang.org/genproto v0.0.0-20190128161407-8ac453e89fca/go.mod h1:L3J43x8/uS+qIUoksaLKe6OS3nUKxOKuIFz1sl2/jx4=
@ -1376,11 +1343,9 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
@ -1404,11 +1369,9 @@ gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76
gopkg.in/square/go-jose.v2 v2.5.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/square/go-jose.v2 v2.5.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg= gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98= gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g= gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE= gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8= gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=


@ -150,6 +150,11 @@ type ComplexityRoot struct {
Name func(childComplexity int) int Name func(childComplexity int) int
} }
GetWorkflowsOutput struct {
TotalNoOfWorkflowRuns func(childComplexity int) int
WorkflowRuns func(childComplexity int) int
}
GitConfigResponse struct { GitConfigResponse struct {
AuthType func(childComplexity int) int AuthType func(childComplexity int) int
Branch func(childComplexity int) int Branch func(childComplexity int) int
@ -337,7 +342,7 @@ type ComplexityRoot struct {
GetScheduledWorkflows func(childComplexity int, projectID string) int GetScheduledWorkflows func(childComplexity int, projectID string) int
GetTemplateManifestByID func(childComplexity int, templateID string) int GetTemplateManifestByID func(childComplexity int, templateID string) int
GetUser func(childComplexity int, username string) int GetUser func(childComplexity int, username string) int
GetWorkFlowRuns func(childComplexity int, projectID string) int GetWorkflowRuns func(childComplexity int, workflowRunsInput model.GetWorkflowRunsInput) int
GetYAMLData func(childComplexity int, experimentInput model.ExperimentInput) int GetYAMLData func(childComplexity int, experimentInput model.ExperimentInput) int
ListDashboard func(childComplexity int, projectID string) int ListDashboard func(childComplexity int, projectID string) int
ListDataSource func(childComplexity int, projectID string) int ListDataSource func(childComplexity int, projectID string) int
@ -433,8 +438,12 @@ type ComplexityRoot struct {
ClusterName func(childComplexity int) int ClusterName func(childComplexity int) int
ClusterType func(childComplexity int) int ClusterType func(childComplexity int) int
ExecutionData func(childComplexity int) int ExecutionData func(childComplexity int) int
ExperimentsPassed func(childComplexity int) int
LastUpdated func(childComplexity int) int LastUpdated func(childComplexity int) int
Phase func(childComplexity int) int
ProjectID func(childComplexity int) int ProjectID func(childComplexity int) int
ResiliencyScore func(childComplexity int) int
TotalExperiments func(childComplexity int) int
WorkflowID func(childComplexity int) int WorkflowID func(childComplexity int) int
WorkflowName func(childComplexity int) int WorkflowName func(childComplexity int) int
WorkflowRunID func(childComplexity int) int WorkflowRunID func(childComplexity int) int
@ -608,7 +617,7 @@ type MutationResolver interface {
DeleteImageRegistry(ctx context.Context, imageRegistryID string, projectID string) (string, error) DeleteImageRegistry(ctx context.Context, imageRegistryID string, projectID string) (string, error)
} }
type QueryResolver interface { type QueryResolver interface {
GetWorkFlowRuns(ctx context.Context, projectID string) ([]*model.WorkflowRun, error) GetWorkflowRuns(ctx context.Context, workflowRunsInput model.GetWorkflowRunsInput) (*model.GetWorkflowsOutput, error)
GetCluster(ctx context.Context, projectID string, clusterType *string) ([]*model.Cluster, error) GetCluster(ctx context.Context, projectID string, clusterType *string) ([]*model.Cluster, error)
GetUser(ctx context.Context, username string) (*model.User, error) GetUser(ctx context.Context, username string) (*model.User, error)
GetProject(ctx context.Context, projectID string) (*model.Project, error) GetProject(ctx context.Context, projectID string) (*model.Project, error)
@ -1146,6 +1155,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Experiments.Name(childComplexity), true return e.complexity.Experiments.Name(childComplexity), true
case "GetWorkflowsOutput.total_no_of_workflow_runs":
if e.complexity.GetWorkflowsOutput.TotalNoOfWorkflowRuns == nil {
break
}
return e.complexity.GetWorkflowsOutput.TotalNoOfWorkflowRuns(childComplexity), true
case "GetWorkflowsOutput.workflow_runs":
if e.complexity.GetWorkflowsOutput.WorkflowRuns == nil {
break
}
return e.complexity.GetWorkflowsOutput.WorkflowRuns(childComplexity), true
case "GitConfigResponse.AuthType": case "GitConfigResponse.AuthType":
if e.complexity.GitConfigResponse.AuthType == nil { if e.complexity.GitConfigResponse.AuthType == nil {
break break
@ -2413,17 +2436,17 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.GetUser(childComplexity, args["username"].(string)), true return e.complexity.Query.GetUser(childComplexity, args["username"].(string)), true
case "Query.getWorkFlowRuns": case "Query.getWorkflowRuns":
if e.complexity.Query.GetWorkFlowRuns == nil { if e.complexity.Query.GetWorkflowRuns == nil {
break break
} }
args, err := ec.field_Query_getWorkFlowRuns_args(context.TODO(), rawArgs) args, err := ec.field_Query_getWorkflowRuns_args(context.TODO(), rawArgs)
if err != nil { if err != nil {
return 0, false return 0, false
} }
return e.complexity.Query.GetWorkFlowRuns(childComplexity, args["project_id"].(string)), true return e.complexity.Query.GetWorkflowRuns(childComplexity, args["workflowRunsInput"].(model.GetWorkflowRunsInput)), true
case "Query.getYAMLData": case "Query.getYAMLData":
if e.complexity.Query.GetYAMLData == nil { if e.complexity.Query.GetYAMLData == nil {
@ -2998,6 +3021,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.WorkflowRun.ExecutionData(childComplexity), true return e.complexity.WorkflowRun.ExecutionData(childComplexity), true
case "WorkflowRun.experiments_passed":
if e.complexity.WorkflowRun.ExperimentsPassed == nil {
break
}
return e.complexity.WorkflowRun.ExperimentsPassed(childComplexity), true
case "WorkflowRun.last_updated": case "WorkflowRun.last_updated":
if e.complexity.WorkflowRun.LastUpdated == nil { if e.complexity.WorkflowRun.LastUpdated == nil {
break break
@ -3005,6 +3035,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.WorkflowRun.LastUpdated(childComplexity), true return e.complexity.WorkflowRun.LastUpdated(childComplexity), true
case "WorkflowRun.phase":
if e.complexity.WorkflowRun.Phase == nil {
break
}
return e.complexity.WorkflowRun.Phase(childComplexity), true
case "WorkflowRun.project_id": case "WorkflowRun.project_id":
if e.complexity.WorkflowRun.ProjectID == nil { if e.complexity.WorkflowRun.ProjectID == nil {
break break
@ -3012,6 +3049,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.WorkflowRun.ProjectID(childComplexity), true return e.complexity.WorkflowRun.ProjectID(childComplexity), true
case "WorkflowRun.resiliency_score":
if e.complexity.WorkflowRun.ResiliencyScore == nil {
break
}
return e.complexity.WorkflowRun.ResiliencyScore(childComplexity), true
case "WorkflowRun.total_experiments":
if e.complexity.WorkflowRun.TotalExperiments == nil {
break
}
return e.complexity.WorkflowRun.TotalExperiments(childComplexity), true
case "WorkflowRun.workflow_id": case "WorkflowRun.workflow_id":
if e.complexity.WorkflowRun.WorkflowID == nil { if e.complexity.WorkflowRun.WorkflowID == nil {
break break
@ -4158,18 +4209,6 @@ type ChaosWorkFlowResponse {
isCustomWorkflow: Boolean! isCustomWorkflow: Boolean!
} }
type WorkflowRun {
workflow_run_id: ID!
workflow_id: ID!
cluster_name: String!
last_updated: String!
project_id: ID!
cluster_id: ID!
workflow_name: String!
cluster_type: String
execution_data: String!
}
input WorkflowRunInput { input WorkflowRunInput {
workflow_id: ID! workflow_id: ID!
workflow_run_id: ID! workflow_run_id: ID!
@ -4326,8 +4365,9 @@ input KubeGVRRequest {
} }
type Query { type Query {
# [Deprecated soon] getWorkflowRuns(
getWorkFlowRuns(project_id: String!): [WorkflowRun!]! @authorized workflowRunsInput: GetWorkflowRunsInput!
): GetWorkflowsOutput! @authorized
getCluster(project_id: String!, cluster_type: String): [Cluster!]! @authorized getCluster(project_id: String!, cluster_type: String): [Cluster!]! @authorized
@ -4362,7 +4402,8 @@ type Query {
GetPromQuery(query: promInput): promResponse! @authorized GetPromQuery(query: promInput): promResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse! @authorized GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse!
@authorized
GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized
@ -4379,7 +4420,10 @@ type Query {
#Image Registry Queries #Image Registry Queries
ListImageRegistry(project_id: String!): [ImageRegistryResponse!] @authorized ListImageRegistry(project_id: String!): [ImageRegistryResponse!] @authorized
GetImageRegistry(image_registry_id: String!, project_id: String!): ImageRegistryResponse! @authorized GetImageRegistry(
image_registry_id: String!
project_id: String!
): ImageRegistryResponse! @authorized
} }
type Mutation { type Mutation {
@ -4417,7 +4461,8 @@ type Mutation {
leaveProject(member: MemberInput!): String! @authorized leaveProject(member: MemberInput!): String! @authorized
#Used to update project name #Used to update project name
updateProjectName(projectID: String!, projectName: String!): String! @authorized updateProjectName(projectID: String!, projectName: String!): String!
@authorized
#It is used to confirm the subscriber registration #It is used to confirm the subscriber registration
clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse! clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse!
@ -4437,7 +4482,8 @@ type Mutation {
syncHub(id: ID!): [MyHubStatus!]! @authorized syncHub(id: ID!): [MyHubStatus!]! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse! @authorized updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse!
@authorized
deleteClusterReg(cluster_id: String!): String! @authorized deleteClusterReg(cluster_id: String!): String! @authorized
@ -4472,16 +4518,25 @@ type Mutation {
deleteDataSource(input: deleteDSInput!): Boolean! @authorized deleteDataSource(input: deleteDSInput!): Boolean! @authorized
# Manifest Template # Manifest Template
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate! @authorized createManifestTemplate(templateInput: TemplateInput): ManifestTemplate!
@authorized
deleteManifestTemplate(template_id: String!): Boolean! @authorized deleteManifestTemplate(template_id: String!): Boolean! @authorized
#Image Registry Mutations #Image Registry Mutations
createImageRegistry(project_id: String!, imageRegistryInfo: imageRegistryInput!): ImageRegistryResponse! @authorized createImageRegistry(
project_id: String!
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
updateImageRegistry(image_registry_id: String!, project_id: String!, imageRegistryInfo: imageRegistryInput!): ImageRegistryResponse! @authorized updateImageRegistry(
image_registry_id: String!
project_id: String!
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized deleteImageRegistry(image_registry_id: String!, project_id: String!): String!
@authorized
} }
type Subscription { type Subscription {
@ -4495,7 +4550,8 @@ type Subscription {
#It is used to listen cluster operation request from the graphql server #It is used to listen cluster operation request from the graphql server
clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction! clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction!
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse! @authorized getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse!
@authorized
} }
`, BuiltIn: false}, `, BuiltIn: false},
&ast.Source{Name: "graph/usermanagement.graphqls", Input: `type User { &ast.Source{Name: "graph/usermanagement.graphqls", Input: `type User {
@ -4528,6 +4584,69 @@ input UpdateUserInput {
email: String email: String
company_name: String company_name: String
} }
`, BuiltIn: false},
&ast.Source{Name: "graph/workflow.graphqls", Input: `enum WorkflowRunStatus {
All
Failed
Running
Succeeded
}
input DateRange {
start_date: String!
end_date: String
}
input WorkflowRunFilterInput {
workflow_name: String
cluster_name: String
workflow_status: WorkflowRunStatus
date_range: DateRange
}
input Pagination {
page: Int!
limit: Int!
}
enum WorkflowRunSortingField {
Name
Time
}
input SortInput {
field: WorkflowRunSortingField!
descending: Boolean
}
input GetWorkflowRunsInput {
project_id: ID!
workflow_run_ids: [ID]
pagination: Pagination
sort: SortInput
filter: WorkflowRunFilterInput
}
type WorkflowRun {
workflow_run_id: ID!
workflow_id: ID!
cluster_name: String!
last_updated: String!
project_id: ID!
cluster_id: ID!
workflow_name: String!
cluster_type: String
phase: String!
resiliency_score: Float
experiments_passed: Int
total_experiments: Int
execution_data: String!
}
type GetWorkflowsOutput {
total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]!
}
`, BuiltIn: false}, `, BuiltIn: false},
} }
var parsedSchema = gqlparser.MustLoadSchema(sources...) var parsedSchema = gqlparser.MustLoadSchema(sources...)
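For reference, a minimal query against the new getWorkflowRuns API defined in graph/workflow.graphqls above might look like the sketch below. The project ID, page size, sort, and status filter are illustrative placeholders rather than values taken from this change, and the zero-based page index is an assumption about the backend's pagination behaviour.

# sketch only: all concrete values below are hypothetical
query {
  getWorkflowRuns(
    workflowRunsInput: {
      project_id: "my-project-id"            # placeholder project ID
      pagination: { page: 0, limit: 10 }     # page index assumed zero-based
      sort: { field: Time, descending: true }
      filter: { workflow_status: Running }
    }
  ) {
    total_no_of_workflow_runs
    workflow_runs {
      workflow_run_id
      workflow_name
      cluster_name
      phase
      resiliency_score
      experiments_passed
      total_experiments
      last_updated
    }
  }
}

A client that previously consumed getWorkFlowRuns would now read the paginated rows from workflow_runs and the overall count from total_no_of_workflow_runs, so the frontend no longer needs to fetch and filter the full run history itself.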
@ -5516,17 +5635,17 @@ func (ec *executionContext) field_Query_getUser_args(ctx context.Context, rawArg
return args, nil return args, nil
} }
func (ec *executionContext) field_Query_getWorkFlowRuns_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { func (ec *executionContext) field_Query_getWorkflowRuns_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error var err error
args := map[string]interface{}{} args := map[string]interface{}{}
var arg0 string var arg0 model.GetWorkflowRunsInput
if tmp, ok := rawArgs["project_id"]; ok { if tmp, ok := rawArgs["workflowRunsInput"]; ok {
arg0, err = ec.unmarshalNString2string(ctx, tmp) arg0, err = ec.unmarshalNGetWorkflowRunsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGetWorkflowRunsInput(ctx, tmp)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} }
args["project_id"] = arg0 args["workflowRunsInput"] = arg0
return args, nil return args, nil
} }
@ -7961,6 +8080,74 @@ func (ec *executionContext) _Experiments_Desc(ctx context.Context, field graphql
return ec.marshalNString2string(ctx, field.Selections, res) return ec.marshalNString2string(ctx, field.Selections, res)
} }
func (ec *executionContext) _GetWorkflowsOutput_total_no_of_workflow_runs(ctx context.Context, field graphql.CollectedField, obj *model.GetWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "GetWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.TotalNoOfWorkflowRuns, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _GetWorkflowsOutput_workflow_runs(ctx context.Context, field graphql.CollectedField, obj *model.GetWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "GetWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.WorkflowRuns, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.WorkflowRun)
fc.Result = res
return ec.marshalNWorkflowRun2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx, field.Selections, res)
}
func (ec *executionContext) _GitConfigResponse_Enabled(ctx context.Context, field graphql.CollectedField, obj *model.GitConfigResponse) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@ -13183,7 +13370,7 @@ func (ec *executionContext) _Provider_Name(ctx context.Context, field graphql.Co
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_getWorkflowRuns(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
@ -13199,7 +13386,7 @@ func (ec *executionContext) _Query_getWorkFlowRuns(ctx context.Context, field gr
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_getWorkflowRuns_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
@ -13208,7 +13395,7 @@ func (ec *executionContext) _Query_getWorkFlowRuns(ctx context.Context, field gr
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
directive0 := func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().GetWorkflowRuns(rctx, args["workflowRunsInput"].(model.GetWorkflowRunsInput))
}
directive1 := func(ctx context.Context) (interface{}, error) {
if ec.directives.Authorized == nil {
@ -13224,10 +13411,10 @@ func (ec *executionContext) _Query_getWorkFlowRuns(ctx context.Context, field gr
if tmp == nil {
return nil, nil
}
if data, ok := tmp.(*model.GetWorkflowsOutput); ok {
return data, nil
}
return nil, fmt.Errorf(`unexpected type %T from directive, should be *github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.GetWorkflowsOutput`, tmp)
})
if err != nil {
ec.Error(ctx, err)
@ -13239,9 +13426,9 @@ func (ec *executionContext) _Query_getWorkFlowRuns(ctx context.Context, field gr
}
return graphql.Null
}
res := resTmp.(*model.GetWorkflowsOutput)
fc.Result = res
return ec.marshalNGetWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGetWorkflowsOutput(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_getCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
@ -17154,6 +17341,133 @@ func (ec *executionContext) _WorkflowRun_cluster_type(ctx context.Context, field
return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
func (ec *executionContext) _WorkflowRun_phase(ctx context.Context, field graphql.CollectedField, obj *model.WorkflowRun) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "WorkflowRun",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Phase, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _WorkflowRun_resiliency_score(ctx context.Context, field graphql.CollectedField, obj *model.WorkflowRun) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "WorkflowRun",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.ResiliencyScore, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*float64)
fc.Result = res
return ec.marshalOFloat2ᚖfloat64(ctx, field.Selections, res)
}
func (ec *executionContext) _WorkflowRun_experiments_passed(ctx context.Context, field graphql.CollectedField, obj *model.WorkflowRun) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "WorkflowRun",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.ExperimentsPassed, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*int)
fc.Result = res
return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
}
func (ec *executionContext) _WorkflowRun_total_experiments(ctx context.Context, field graphql.CollectedField, obj *model.WorkflowRun) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "WorkflowRun",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.TotalExperiments, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*int)
fc.Result = res
return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
}
func (ec *executionContext) _WorkflowRun_execution_data(ctx context.Context, field graphql.CollectedField, obj *model.WorkflowRun) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@ -20936,6 +21250,30 @@ func (ec *executionContext) unmarshalInputDSInput(ctx context.Context, obj inter
return it, nil
}
func (ec *executionContext) unmarshalInputDateRange(ctx context.Context, obj interface{}) (model.DateRange, error) {
var it model.DateRange
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "start_date":
var err error
it.StartDate, err = ec.unmarshalNString2string(ctx, v)
if err != nil {
return it, err
}
case "end_date":
var err error
it.EndDate, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputExperimentInput(ctx context.Context, obj interface{}) (model.ExperimentInput, error) {
var it model.ExperimentInput
var asMap = obj.(map[string]interface{})
@ -20978,6 +21316,48 @@ func (ec *executionContext) unmarshalInputExperimentInput(ctx context.Context, o
return it, nil
}
func (ec *executionContext) unmarshalInputGetWorkflowRunsInput(ctx context.Context, obj interface{}) (model.GetWorkflowRunsInput, error) {
var it model.GetWorkflowRunsInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "project_id":
var err error
it.ProjectID, err = ec.unmarshalNID2string(ctx, v)
if err != nil {
return it, err
}
case "workflow_run_ids":
var err error
it.WorkflowRunIds, err = ec.unmarshalOID2ᚕᚖstring(ctx, v)
if err != nil {
return it, err
}
case "pagination":
var err error
it.Pagination, err = ec.unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx, v)
if err != nil {
return it, err
}
case "sort":
var err error
it.Sort, err = ec.unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v)
if err != nil {
return it, err
}
case "filter":
var err error
it.Filter, err = ec.unmarshalOWorkflowRunFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunFilterInput(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputGitConfig(ctx context.Context, obj interface{}) (model.GitConfig, error) {
var it model.GitConfig
var asMap = obj.(map[string]interface{})
@ -21158,6 +21538,30 @@ func (ec *executionContext) unmarshalInputMemberInput(ctx context.Context, obj i
return it, nil
}
func (ec *executionContext) unmarshalInputPagination(ctx context.Context, obj interface{}) (model.Pagination, error) {
var it model.Pagination
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "page":
var err error
it.Page, err = ec.unmarshalNInt2int(ctx, v)
if err != nil {
return it, err
}
case "limit":
var err error
it.Limit, err = ec.unmarshalNInt2int(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputPodLog(ctx context.Context, obj interface{}) (model.PodLog, error) {
var it model.PodLog
var asMap = obj.(map[string]interface{})
@ -21266,6 +21670,30 @@ func (ec *executionContext) unmarshalInputPodLogRequest(ctx context.Context, obj
return it, nil
}
func (ec *executionContext) unmarshalInputSortInput(ctx context.Context, obj interface{}) (model.SortInput, error) {
var it model.SortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputTemplateInput(ctx context.Context, obj interface{}) (model.TemplateInput, error) {
var it model.TemplateInput
var asMap = obj.(map[string]interface{})
@ -21446,6 +21874,42 @@ func (ec *executionContext) unmarshalInputWeightagesInput(ctx context.Context, o
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowRunFilterInput(ctx context.Context, obj interface{}) (model.WorkflowRunFilterInput, error) {
var it model.WorkflowRunFilterInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "workflow_name":
var err error
it.WorkflowName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "cluster_name":
var err error
it.ClusterName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "workflow_status":
var err error
it.WorkflowStatus, err = ec.unmarshalOWorkflowRunStatus2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx, v)
if err != nil {
return it, err
}
case "date_range":
var err error
it.DateRange, err = ec.unmarshalODateRange2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐDateRange(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowRunInput(ctx context.Context, obj interface{}) (model.WorkflowRunInput, error) {
var it model.WorkflowRunInput
var asMap = obj.(map[string]interface{})
@ -22547,6 +23011,38 @@ func (ec *executionContext) _Experiments(ctx context.Context, sel ast.SelectionS
return out
}
var getWorkflowsOutputImplementors = []string{"GetWorkflowsOutput"}
func (ec *executionContext) _GetWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, obj *model.GetWorkflowsOutput) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, getWorkflowsOutputImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("GetWorkflowsOutput")
case "total_no_of_workflow_runs":
out.Values[i] = ec._GetWorkflowsOutput_total_no_of_workflow_runs(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "workflow_runs":
out.Values[i] = ec._GetWorkflowsOutput_workflow_runs(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var gitConfigResponseImplementors = []string{"GitConfigResponse"}
func (ec *executionContext) _GitConfigResponse(ctx context.Context, sel ast.SelectionSet, obj *model.GitConfigResponse) graphql.Marshaler {
@ -23457,7 +23953,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("Query")
case "getWorkflowRuns":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
@ -23465,7 +23961,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Query_getWorkflowRuns(ctx, field)
if res == graphql.Null {
atomic.AddUint32(&invalids, 1)
}
@ -24249,6 +24745,17 @@ func (ec *executionContext) _WorkflowRun(ctx context.Context, sel ast.SelectionS
}
case "cluster_type":
out.Values[i] = ec._WorkflowRun_cluster_type(ctx, field, obj)
case "phase":
out.Values[i] = ec._WorkflowRun_phase(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "resiliency_score":
out.Values[i] = ec._WorkflowRun_resiliency_score(ctx, field, obj)
case "experiments_passed":
out.Values[i] = ec._WorkflowRun_experiments_passed(ctx, field, obj)
case "total_experiments":
out.Values[i] = ec._WorkflowRun_total_experiments(ctx, field, obj)
case "execution_data": case "execution_data":
out.Values[i] = ec._WorkflowRun_execution_data(ctx, field, obj) out.Values[i] = ec._WorkflowRun_execution_data(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
@ -25480,6 +25987,24 @@ func (ec *executionContext) marshalNExperiments2ᚖgithubᚗcomᚋlitmuschaosᚋ
return ec._Experiments(ctx, sel, v) return ec._Experiments(ctx, sel, v)
} }
func (ec *executionContext) unmarshalNGetWorkflowRunsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGetWorkflowRunsInput(ctx context.Context, v interface{}) (model.GetWorkflowRunsInput, error) {
return ec.unmarshalInputGetWorkflowRunsInput(ctx, v)
}
func (ec *executionContext) marshalNGetWorkflowsOutput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGetWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v model.GetWorkflowsOutput) graphql.Marshaler {
return ec._GetWorkflowsOutput(ctx, sel, &v)
}
func (ec *executionContext) marshalNGetWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGetWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v *model.GetWorkflowsOutput) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._GetWorkflowsOutput(ctx, sel, v)
}
func (ec *executionContext) unmarshalNGitConfig2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐGitConfig(ctx context.Context, v interface{}) (model.GitConfig, error) {
return ec.unmarshalInputGitConfig(ctx, v)
}
@ -26234,7 +26759,7 @@ func (ec *executionContext) marshalNWorkflowRun2githubᚗcomᚋlitmuschaosᚋlit
return ec._WorkflowRun(ctx, sel, &v)
}
func (ec *executionContext) marshalNWorkflowRun2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v []*model.WorkflowRun) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
@ -26258,7 +26783,7 @@ func (ec *executionContext) marshalNWorkflowRun2ᚕᚖgithubᚗcomᚋlitmuschaos
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalOWorkflowRun2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx, sel, v[i])
}
if isLen1 {
f(i)
@ -26285,6 +26810,15 @@ func (ec *executionContext) unmarshalNWorkflowRunInput2githubᚗcomᚋlitmuschao
return ec.unmarshalInputWorkflowRunInput(ctx, v)
}
func (ec *executionContext) unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx context.Context, v interface{}) (model.WorkflowRunSortingField, error) {
var res model.WorkflowRunSortingField
return res, res.UnmarshalGQL(v)
}
func (ec *executionContext) marshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRunSortingField) graphql.Marshaler {
return v
}
func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler {
return ec.___Directive(ctx, sel, &v)
}
@ -26797,6 +27331,18 @@ func (ec *executionContext) marshalODSResponse2ᚖgithubᚗcomᚋlitmuschaosᚋl
return ec._DSResponse(ctx, sel, v)
}
func (ec *executionContext) unmarshalODateRange2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐDateRange(ctx context.Context, v interface{}) (model.DateRange, error) {
return ec.unmarshalInputDateRange(ctx, v)
}
func (ec *executionContext) unmarshalODateRange2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐDateRange(ctx context.Context, v interface{}) (*model.DateRange, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalODateRange2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐDateRange(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOFloat2float64(ctx context.Context, v interface{}) (float64, error) {
return graphql.UnmarshalFloat(v)
}
@ -26995,6 +27541,18 @@ func (ec *executionContext) marshalOMyHubStatus2ᚖgithubᚗcomᚋlitmuschaosᚋ
return ec._MyHubStatus(ctx, sel, v)
}
func (ec *executionContext) unmarshalOPagination2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx context.Context, v interface{}) (model.Pagination, error) {
return ec.unmarshalInputPagination(ctx, v)
}
func (ec *executionContext) unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx context.Context, v interface{}) (*model.Pagination, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOPagination2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx, v)
return &res, err
}
func (ec *executionContext) marshalOScheduledWorkflows2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v model.ScheduledWorkflows) graphql.Marshaler {
return ec._ScheduledWorkflows(ctx, sel, &v)
}
@ -27006,6 +27564,18 @@ func (ec *executionContext) marshalOScheduledWorkflows2ᚖgithubᚗcomᚋlitmusc
return ec._ScheduledWorkflows(ctx, sel, v)
}
func (ec *executionContext) unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (model.SortInput, error) {
return ec.unmarshalInputSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (*model.SortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) {
return graphql.UnmarshalString(v)
}
@ -27084,6 +27654,53 @@ func (ec *executionContext) marshalOWorkflow2ᚖgithubᚗcomᚋlitmuschaosᚋlit
return ec._Workflow(ctx, sel, v)
}
func (ec *executionContext) marshalOWorkflowRun2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRun) graphql.Marshaler {
return ec._WorkflowRun(ctx, sel, &v)
}
func (ec *executionContext) marshalOWorkflowRun2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v *model.WorkflowRun) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec._WorkflowRun(ctx, sel, v)
}
func (ec *executionContext) unmarshalOWorkflowRunFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunFilterInput(ctx context.Context, v interface{}) (model.WorkflowRunFilterInput, error) {
return ec.unmarshalInputWorkflowRunFilterInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowRunFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunFilterInput(ctx context.Context, v interface{}) (*model.WorkflowRunFilterInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowRunFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunFilterInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, v interface{}) (model.WorkflowRunStatus, error) {
var res model.WorkflowRunStatus
return res, res.UnmarshalGQL(v)
}
func (ec *executionContext) marshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRunStatus) graphql.Marshaler {
return v
}
func (ec *executionContext) unmarshalOWorkflowRunStatus2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, v interface{}) (*model.WorkflowRunStatus, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx, v)
return &res, err
}
func (ec *executionContext) marshalOWorkflowRunStatus2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, sel ast.SelectionSet, v *model.WorkflowRunStatus) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return v
}
func (ec *executionContext) marshalOWorkflowRuns2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRuns(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRuns) graphql.Marshaler {
return ec._WorkflowRuns(ctx, sel, &v)
}

View File

@ -197,6 +197,11 @@ type DSResponse struct {
UpdatedAt *string `json:"updated_at"`
}
type DateRange struct {
StartDate string `json:"start_date"`
EndDate *string `json:"end_date"`
}
type ExperimentInput struct {
ProjectID string `json:"ProjectID"`
ChartName string `json:"ChartName"`
@ -211,6 +216,19 @@ type Experiments struct {
Desc string `json:"Desc"`
}
type GetWorkflowRunsInput struct {
ProjectID string `json:"project_id"`
WorkflowRunIds []*string `json:"workflow_run_ids"`
Pagination *Pagination `json:"pagination"`
Sort *SortInput `json:"sort"`
Filter *WorkflowRunFilterInput `json:"filter"`
}
type GetWorkflowsOutput struct {
TotalNoOfWorkflowRuns int `json:"total_no_of_workflow_runs"`
WorkflowRuns []*WorkflowRun `json:"workflow_runs"`
}
type GitConfig struct {
ProjectID string `json:"ProjectID"`
Branch string `json:"Branch"`
@ -351,6 +369,11 @@ type PackageInformation struct {
Experiments []*Experiments `json:"Experiments"`
}
type Pagination struct {
Page int `json:"page"`
Limit int `json:"limit"`
}
type PodLog struct {
ClusterID *ClusterIdentity `json:"cluster_id"`
RequestID string `json:"request_id"`
@ -415,6 +438,11 @@ type ScheduledWorkflows struct {
IsRemoved bool `json:"isRemoved"`
}
type SortInput struct {
Field WorkflowRunSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type Spec struct {
DisplayName string `json:"DisplayName"`
CategoryDescription string `json:"CategoryDescription"`
@ -507,9 +535,20 @@ type WorkflowRun struct {
ClusterID string `json:"cluster_id"`
WorkflowName string `json:"workflow_name"`
ClusterType *string `json:"cluster_type"`
Phase string `json:"phase"`
ResiliencyScore *float64 `json:"resiliency_score"`
ExperimentsPassed *int `json:"experiments_passed"`
TotalExperiments *int `json:"total_experiments"`
ExecutionData string `json:"execution_data"`
}
type WorkflowRunFilterInput struct {
WorkflowName *string `json:"workflow_name"`
ClusterName *string `json:"cluster_name"`
WorkflowStatus *WorkflowRunStatus `json:"workflow_status"`
DateRange *DateRange `json:"date_range"`
}
type WorkflowRunInput struct {
WorkflowID string `json:"workflow_id"`
WorkflowRunID string `json:"workflow_run_id"`
@ -828,3 +867,89 @@ func (e *MemberRole) UnmarshalGQL(v interface{}) error {
func (e MemberRole) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type WorkflowRunSortingField string
const (
WorkflowRunSortingFieldName WorkflowRunSortingField = "Name"
WorkflowRunSortingFieldTime WorkflowRunSortingField = "Time"
)
var AllWorkflowRunSortingField = []WorkflowRunSortingField{
WorkflowRunSortingFieldName,
WorkflowRunSortingFieldTime,
}
func (e WorkflowRunSortingField) IsValid() bool {
switch e {
case WorkflowRunSortingFieldName, WorkflowRunSortingFieldTime:
return true
}
return false
}
func (e WorkflowRunSortingField) String() string {
return string(e)
}
func (e *WorkflowRunSortingField) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = WorkflowRunSortingField(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid WorkflowRunSortingField", str)
}
return nil
}
func (e WorkflowRunSortingField) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type WorkflowRunStatus string
const (
WorkflowRunStatusAll WorkflowRunStatus = "All"
WorkflowRunStatusFailed WorkflowRunStatus = "Failed"
WorkflowRunStatusRunning WorkflowRunStatus = "Running"
WorkflowRunStatusSucceeded WorkflowRunStatus = "Succeeded"
)
var AllWorkflowRunStatus = []WorkflowRunStatus{
WorkflowRunStatusAll,
WorkflowRunStatusFailed,
WorkflowRunStatusRunning,
WorkflowRunStatusSucceeded,
}
func (e WorkflowRunStatus) IsValid() bool {
switch e {
case WorkflowRunStatusAll, WorkflowRunStatusFailed, WorkflowRunStatusRunning, WorkflowRunStatusSucceeded:
return true
}
return false
}
func (e WorkflowRunStatus) String() string {
return string(e)
}
func (e *WorkflowRunStatus) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = WorkflowRunStatus(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid WorkflowRunStatus", str)
}
return nil
}
func (e WorkflowRunStatus) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}

View File

@ -114,18 +114,6 @@ type ChaosWorkFlowResponse {
isCustomWorkflow: Boolean!
}
type WorkflowRun {
workflow_run_id: ID!
workflow_id: ID!
cluster_name: String!
last_updated: String!
project_id: ID!
cluster_id: ID!
workflow_name: String!
cluster_type: String
execution_data: String!
}
input WorkflowRunInput {
workflow_id: ID!
workflow_run_id: ID!
@ -282,8 +270,9 @@ input KubeGVRRequest {
}
type Query {
getWorkflowRuns(
workflowRunsInput: GetWorkflowRunsInput!
): GetWorkflowsOutput! @authorized
getCluster(project_id: String!, cluster_type: String): [Cluster!]! @authorized
@ -335,7 +324,10 @@ type Query {
#Image Registry Queries
ListImageRegistry(project_id: String!): [ImageRegistryResponse!] @authorized
GetImageRegistry(
image_registry_id: String!
project_id: String!
): ImageRegistryResponse! @authorized
}
type Mutation {
@ -343,8 +335,7 @@ type Mutation {
userClusterReg(clusterInput: ClusterInput!): clusterRegResponse! @authorized
#It is used to create chaosworkflow
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized
reRunChaosWorkFlow(workflowID: String!): String! @authorized
@ -433,9 +424,16 @@ type Mutation {
deleteManifestTemplate(template_id: String!): Boolean! @authorized
#Image Registry Mutations
createImageRegistry(
project_id: String!
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
updateImageRegistry(
image_registry_id: String!
project_id: String!
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized
}

View File

@ -295,12 +295,12 @@ func (r *mutationResolver) DeleteImageRegistry(ctx context.Context, imageRegistr
return diRegistry, err
}
func (r *queryResolver) GetWorkflowRuns(ctx context.Context, workflowRunsInput model.GetWorkflowRunsInput) (*model.GetWorkflowsOutput, error) {
err := validate.ValidateRole(ctx, workflowRunsInput.ProjectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
if err != nil {
return nil, err
}
return wfHandler.QueryWorkflowRuns(workflowRunsInput)
}
func (r *queryResolver) GetCluster(ctx context.Context, projectID string, clusterType *string) ([]*model.Cluster, error) {
@ -346,11 +346,7 @@ func (r *queryResolver) ListWorkflow(ctx context.Context, projectID string, work
if err != nil {
return nil, err
}
return wfHandler.QueryListWorkflow(projectID, workflowIds)
}
func (r *queryResolver) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) {

View File

@ -0,0 +1,62 @@
enum WorkflowRunStatus {
All
Failed
Running
Succeeded
}
input DateRange {
start_date: String!
end_date: String
}
input WorkflowRunFilterInput {
workflow_name: String
cluster_name: String
workflow_status: WorkflowRunStatus
date_range: DateRange
}
input Pagination {
page: Int!
limit: Int!
}
enum WorkflowRunSortingField {
Name
Time
}
input SortInput {
field: WorkflowRunSortingField!
descending: Boolean
}
input GetWorkflowRunsInput {
project_id: ID!
workflow_run_ids: [ID]
pagination: Pagination
sort: SortInput
filter: WorkflowRunFilterInput
}
type WorkflowRun {
workflow_run_id: ID!
workflow_id: ID!
cluster_name: String!
last_updated: String!
project_id: ID!
cluster_id: ID!
workflow_name: String!
cluster_type: String
phase: String!
resiliency_score: Float
experiments_passed: Int
total_experiments: Int
execution_data: String!
}
type GetWorkflowsOutput {
total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]!
}
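To make the new contract concrete, the sketch below shows how a Go caller or test might assemble the input defined above. This helper is hypothetical (it is not part of this commit); the model types and enum constants it uses are the ones generated from this schema, page numbers are zero-based, and every field other than project_id is optional.

// Hypothetical helper, for illustration only.
package example

import (
	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func exampleWorkflowRunsInput(projectID string) model.GetWorkflowRunsInput {
	descending := true
	status := model.WorkflowRunStatusSucceeded

	return model.GetWorkflowRunsInput{
		ProjectID: projectID,
		// A nil WorkflowRunIds slice means "do not restrict to specific runs".
		Pagination: &model.Pagination{Page: 0, Limit: 15},
		Sort:       &model.SortInput{Field: model.WorkflowRunSortingFieldTime, Descending: &descending},
		Filter:     &model.WorkflowRunFilterInput{WorkflowStatus: &status},
	}
}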

View File

@ -4,11 +4,14 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"log"
"strconv"
"strings"
"time"
"go.mongodb.org/mongo-driver/mongo"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@ -17,6 +20,7 @@ import (
"github.com/google/uuid"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
types "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow/ops"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster"
store "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/data-store"
@ -115,37 +119,251 @@ func UpdateWorkflow(ctx context.Context, input *model.ChaosWorkFlowInput, r *sto
}, nil
}
// QueryWorkflowRuns sends all the workflow runs for a project from the DB
func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOutput, error) {
var pipeline mongo.Pipeline
// Match with projectID
matchStage := bson.D{
{"$match", bson.D{
{"project_id", input.ProjectID},
}},
}
pipeline = append(pipeline, matchStage)
includeAllFromWorkflow := bson.D{
{"workflow_id", 1},
{"workflow_name", 1},
{"workflow_manifest", 1},
{"cronSyntax", 1},
{"workflow_description", 1},
{"weightages", 1},
{"isCustomWorkflow", 1},
{"updated_at", 1},
{"created_at", 1},
{"project_id", 1},
{"cluster_id", 1},
{"cluster_name", 1},
{"cluster_type", 1},
{"isRemoved", 1},
}
// Filter the workflow_runs array down to the workflow_run_ids supplied in the input
if len(input.WorkflowRunIds) != 0 {
matchWfRunIdStage := bson.D{
{"$project", append(includeAllFromWorkflow,
bson.E{Key: "workflow_runs", Value: bson.D{
{"$filter", bson.D{
{"input", "$workflow_runs"},
{"as", "wfRun"},
{"cond", bson.D{
{"$in", bson.A{"$$wfRun.workflow_run_id", input.WorkflowRunIds}},
}},
}},
}},
)},
}
pipeline = append(pipeline, matchWfRunIdStage)
}
// Filtering based on multiple parameters
if input.Filter != nil {
// Filtering based on workflow name
if input.Filter.WorkflowName != nil && *input.Filter.WorkflowName != "" {
matchWfNameStage := bson.D{
{"$match", bson.D{
{"workflow_name", bson.D{
{"$regex", input.Filter.WorkflowName},
}},
}},
}
pipeline = append(pipeline, matchWfNameStage)
}
// Filtering based on cluster name
if input.Filter.ClusterName != nil && *input.Filter.ClusterName != "All" && *input.Filter.ClusterName != "" {
matchClusterStage := bson.D{
{"$match", bson.D{
{"cluster_name", input.Filter.ClusterName},
}},
}
pipeline = append(pipeline, matchClusterStage)
}
// Filtering based on phase
if input.Filter.WorkflowStatus != nil && *input.Filter.WorkflowStatus != "All" && *input.Filter.WorkflowStatus != "" {
filterWfRunPhaseStage := bson.D{
{"$project", append(includeAllFromWorkflow,
bson.E{Key: "workflow_runs", Value: bson.D{
{"$filter", bson.D{
{"input", "$workflow_runs"},
{"as", "wfRun"},
{"cond", bson.D{
{"$eq", bson.A{"$$wfRun.phase", string(*input.Filter.WorkflowStatus)}},
}},
}},
}},
)},
}
pipeline = append(pipeline, filterWfRunPhaseStage)
}
// Filtering based on date range
if input.Filter.DateRange != nil {
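// Note: last_updated is stored as a Unix-epoch-seconds string (see strconv.FormatInt further down in this file),
// so the $gte/$lte conditions below compare those string timestamps; start_date/end_date are expected in the same format.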
endDate := strconv.FormatInt(time.Now().Unix(), 10)
if input.Filter.DateRange.EndDate != nil {
endDate = *input.Filter.DateRange.EndDate
}
filterWfRunDateStage := bson.D{
{"$project", append(includeAllFromWorkflow,
bson.E{Key: "workflow_runs", Value: bson.D{
{"$filter", bson.D{
{"input", "$workflow_runs"},
{"as", "wfRun"},
{"cond", bson.D{
{"$and", bson.A{
bson.D{{"$lte", bson.A{"$$wfRun.last_updated", endDate}}},
bson.D{{"$gte", bson.A{"$$wfRun.last_updated", input.Filter.DateRange.StartDate}}},
}},
}},
}},
}},
)},
}
pipeline = append(pipeline, filterWfRunDateStage)
}
}
// Flatten out the workflow runs
unwindStage := bson.D{
{"$unwind", bson.D{
{"path", "$workflow_runs"},
}},
}
pipeline = append(pipeline, unwindStage)
var sortStage bson.D
switch {
case input.Sort != nil && input.Sort.Field == model.WorkflowRunSortingFieldTime:
// Sorting based on LastUpdated time
if input.Sort.Descending != nil && *input.Sort.Descending {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_runs.last_updated", -1},
}},
}
} else {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_runs.last_updated", 1},
}},
}
}
case input.Sort != nil && input.Sort.Field == model.WorkflowRunSortingFieldName:
// Sorting based on WorkflowName
if input.Sort.Descending != nil && *input.Sort.Descending {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_name", -1},
}},
}
} else {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_name", 1},
}},
}
}
default:
// Default sorting: sorts it by LastUpdated time in descending order
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_runs.last_updated", -1},
}},
}
}
// Pagination
paginatedWorkflows := bson.A{
sortStage,
}
if input.Pagination != nil {
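// Illustration: with page=2 and limit=15 the stages below become {$skip: 30} and {$limit: 15},
// i.e. the third page of 15 runs (page numbers are zero-based).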
paginationSkipStage := bson.D{
{"$skip", input.Pagination.Page * input.Pagination.Limit},
}
paginationLimitStage := bson.D{
{"$limit", input.Pagination.Limit},
}
paginatedWorkflows = append(paginatedWorkflows, paginationSkipStage, paginationLimitStage)
}
// Add a $facet with two parallel sub-pipelines: one counts the filtered workflow runs, the other paginates the results
facetStage := bson.D{
{"$facet", bson.D{
{"total_filtered_workflow_runs", bson.A{
bson.D{{"$count", "count"}},
}},
{"flattened_workflow_runs", paginatedWorkflows},
}},
}
pipeline = append(pipeline, facetStage)
// Call aggregation on pipeline
workflowsCursor, err := dbOperationsWorkflow.GetAggregateWorkflows(pipeline)
var result []*model.WorkflowRun
var workflows []dbSchemaWorkflow.AggregatedWorkflowRuns
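// The $facet stage emits a single document containing both arrays, so after decoding this slice
// is expected to hold at most one element, which is why the code below indexes workflows[0].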
if err = workflowsCursor.All(context.Background(), &workflows); err != nil {
fmt.Println(err)
return &model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: 0,
WorkflowRuns: result,
}, nil
}
for _, workflow := range workflows[0].FlattenedWorkflowRuns {
workflowRun := workflow.WorkflowRuns
newWorkflowRun := model.WorkflowRun{
WorkflowName: workflow.WorkflowName,
WorkflowID: workflow.WorkflowID,
WorkflowRunID: workflowRun.WorkflowRunID,
LastUpdated: workflowRun.LastUpdated,
ProjectID: workflow.ProjectID,
ClusterID: workflow.ClusterID,
Phase: workflowRun.Phase,
ResiliencyScore: workflowRun.ResiliencyScore,
ExperimentsPassed: workflowRun.ExperimentsPassed,
TotalExperiments: workflowRun.TotalExperiments,
ExecutionData: workflowRun.ExecutionData,
ClusterName: workflow.ClusterName,
ClusterType: &workflow.ClusterType,
}
result = append(result, &newWorkflowRun)
}
totalFilteredWorkflowRuns := 0
if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflowRuns) > 0 {
totalFilteredWorkflowRuns = workflows[0].TotalFilteredWorkflowRuns[0].Count
}
output := model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: totalFilteredWorkflowRuns,
WorkflowRuns: result,
}
return &output, nil
}
// Deprecated
func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
if err != nil {
@ -186,22 +404,32 @@ func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
return result, nil
}
// QueryListWorkflow returns all the workflows present in the given project
func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workflow, error) {
var query bson.D
if len(workflowIds) != 0 {
query = bson.D{
{"project_id", project_id},
{"workflow_id", bson.M{"$in": workflowIds}},
}
} else {
query = bson.D{
{"project_id", project_id},
}
}
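// An empty workflowIds slice falls back to every workflow in the project; otherwise the $in clause restricts the query to the given IDs.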
chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(query)
if err != nil {
return nil, err
}
var result []*model.Workflow
for _, workflow := range chaosWorkflows {
cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
if err != nil {
return nil, err
}
var Weightages []*model.Weightages
copier.Copy(&Weightages, &workflow.Weightages)
var WorkflowRuns []*model.WorkflowRuns
copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)
@ -228,66 +456,35 @@ func QueryListWorkflow(project_id string) ([]*model.Workflow, error) {
return result, nil
}
func QueryListWorkflowByIDs(workflow_ids []*string) ([]*model.Workflow, error) {
chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"workflow_id", bson.M{"$in": workflow_ids}}})
if err != nil {
return nil, err
}
result := []*model.Workflow{}
for _, workflow := range chaosWorkflows {
cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
if err != nil {
return nil, err
}
var Weightages []*model.Weightages
copier.Copy(&Weightages, &workflow.Weightages)
var WorkflowRuns []*model.WorkflowRuns
copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)
newChaosWorkflows := model.Workflow{
WorkflowType: string(workflow.WorkflowType),
WorkflowID: workflow.WorkflowID,
WorkflowManifest: workflow.WorkflowManifest,
WorkflowName: workflow.WorkflowName,
CronSyntax: workflow.CronSyntax,
WorkflowDescription: workflow.WorkflowDescription,
Weightages: Weightages,
IsCustomWorkflow: workflow.IsCustomWorkflow,
UpdatedAt: workflow.UpdatedAt,
CreatedAt: workflow.CreatedAt,
ProjectID: workflow.ProjectID,
ClusterName: cluster.ClusterName,
ClusterID: cluster.ClusterID,
ClusterType: cluster.ClusterType,
WorkflowRuns: WorkflowRuns,
}
result = append(result, &newChaosWorkflows)
}
return result, nil
}
// WorkFlowRunHandler Updates or Inserts a new Workflow Run into the DB
func WorkFlowRunHandler(input model.WorkflowRunInput, r store.StateData) (string, error) {
cluster, err := cluster.VerifyCluster(*input.ClusterID)
if err != nil {
log.Println("ERROR", err)
return "", err
}
// Parse and store execution data
var executionData types.ExecutionData
err = json.Unmarshal([]byte(input.ExecutionData), &executionData)
if err != nil {
log.Println("Can not parse Execution Data of workflow run with id: ", input.WorkflowRunID)
return "", err return "", err
} }
// Resiliency Score will be calculated only if workflow execution is completed // Resiliency Score will be calculated only if workflow execution is completed
if input.Completed { if input.Completed {
input.ExecutionData = ops.ResiliencyScoreCalculator(input.ExecutionData, input.WorkflowID) executionData = ops.ResiliencyScoreCalculator(executionData, input.WorkflowID)
} }
// err = dbOperationsWorkflow.UpdateWorkflowRun(dbOperationsWorkflow.WorkflowRun(newWorkflowRun)) count := 0
count, err := dbOperationsWorkflow.UpdateWorkflowRun(input.WorkflowID, dbSchemaWorkflow.ChaosWorkflowRun{ count, err = dbOperationsWorkflow.UpdateWorkflowRun(input.WorkflowID, dbSchemaWorkflow.ChaosWorkflowRun{
WorkflowRunID: input.WorkflowRunID, WorkflowRunID: input.WorkflowRunID,
LastUpdated: strconv.FormatInt(time.Now().Unix(), 10), LastUpdated: strconv.FormatInt(time.Now().Unix(), 10),
Phase: executionData.Phase,
ResiliencyScore: &executionData.ResiliencyScore,
ExperimentsPassed: &executionData.ExperimentsPassed,
TotalExperiments: &executionData.TotalExperiments,
ExecutionData: input.ExecutionData,
Completed: input.Completed,
})
@ -307,6 +504,10 @@ func WorkFlowRunHandler(input model.WorkflowRunInput, r store.StateData) (string
LastUpdated: strconv.FormatInt(time.Now().Unix(), 10),
WorkflowRunID: input.WorkflowRunID,
WorkflowName: input.WorkflowName,
Phase: executionData.Phase,
ResiliencyScore: &executionData.ResiliencyScore,
ExperimentsPassed: &executionData.ExperimentsPassed,
TotalExperiments: &executionData.TotalExperiments,
ExecutionData: input.ExecutionData,
WorkflowID: input.WorkflowID,
}, &r)

View File

@ -14,6 +14,7 @@ import (
"github.com/jinzhu/copier" "github.com/jinzhu/copier"
chaosTypes "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1" chaosTypes "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model" "github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
types "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow"
clusterOps "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster" clusterOps "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster"
clusterHandler "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster/handler" clusterHandler "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster/handler"
store "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/data-store" store "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/data-store"
@ -26,48 +27,6 @@ import (
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
) )
type WorkflowEvent struct {
WorkflowID string `json:"-"`
EventType string `json:"event_type"`
UID string `json:"-"`
Namespace string `json:"namespace"`
Name string `json:"name"`
CreationTimestamp string `json:"creationTimestamp"`
Phase string `json:"phase"`
Message string `json:"message"`
StartedAt string `json:"startedAt"`
FinishedAt string `json:"finishedAt"`
Nodes map[string]Node `json:"nodes"`
}
// each node/step data
type Node struct {
Name string `json:"name"`
Phase string `json:"phase"`
Message string `json:"message"`
StartedAt string `json:"startedAt"`
FinishedAt string `json:"finishedAt"`
Children []string `json:"children"`
Type string `json:"type"`
ChaosExp *ChaosData `json:"chaosData,omitempty"`
}
// chaos data
type ChaosData struct {
EngineUID string `json:"engineUID"`
EngineName string `json:"engineName"`
Namespace string `json:"namespace"`
ExperimentName string `json:"experimentName"`
ExperimentStatus string `json:"experimentStatus"`
LastUpdatedAt string `json:"lastUpdatedAt"`
ExperimentVerdict string `json:"experimentVerdict"`
ExperimentPod string `json:"experimentPod"`
RunnerPod string `json:"runnerPod"`
ProbeSuccessPercentage string `json:"probeSuccessPercentage"`
FailStep string `json:"failStep"`
ChaosResult *chaosTypes.ChaosResult `json:"chaosResult"`
}
// ProcessWorkflow takes the workflow and processes it as required
func ProcessWorkflow(workflow *model.ChaosWorkFlowInput) (*model.ChaosWorkFlowInput, *dbSchemaWorkflow.ChaosWorkflowType, error) {
// security check for cluster access
@ -146,6 +105,12 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
copier.Copy(&Weightages, &input.Weightages)
}
// Get cluster information
cluster, err := dbOperationsCluster.GetCluster(input.ClusterID)
if err != nil {
return err
}
newChaosWorkflow := dbSchemaWorkflow.ChaosWorkFlowInput{
WorkflowID: *input.WorkflowID,
WorkflowManifest: input.WorkflowManifest,
@ -156,6 +121,8 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
IsCustomWorkflow: input.IsCustomWorkflow,
ProjectID: input.ProjectID,
ClusterID: input.ClusterID,
ClusterName: cluster.ClusterName,
ClusterType: cluster.ClusterType,
Weightages: Weightages,
CreatedAt: strconv.FormatInt(time.Now().Unix(), 10),
UpdatedAt: strconv.FormatInt(time.Now().Unix(), 10),
@ -163,7 +130,7 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
IsRemoved: false,
}
err = dbOperationsWorkflow.InsertChaosWorkflow(newChaosWorkflow)
if err != nil {
return err
}
@ -249,18 +216,20 @@ func SendWorkflowEvent(wfRun model.WorkflowRun, r *store.StateData) {
r.Mutex.Unlock()
}
// ResiliencyScoreCalculator calculates the Resiliency Score and returns the updated ExecutionData
func ResiliencyScoreCalculator(execData types.ExecutionData, wfid string) types.ExecutionData {
var resiliencyScore float64 = 0.0
var weightSum, totalTestResult, totalExperiments, totalExperimentsPassed int = 0, 0, 0, 0
chaosWorkflows, _ := dbOperationsWorkflow.GetWorkflows(bson.D{{"workflow_id", bson.M{"$in": []string{wfid}}}})
totalExperiments = len(chaosWorkflows[0].Weightages)
weightMap := map[string]int{}
for _, weightEnty := range chaosWorkflows[0].Weightages {
weightMap[weightEnty.ExperimentName] = weightEnty.Weightage
}
for _, value := range execData.Nodes {
if value.Type == "ChaosEngine" {
if value.ChaosExp == nil {
continue
@ -276,12 +245,14 @@ func ResiliencyScoreCalculator(execData string, wfid string) string {
}
}
}
if weightSum == 0 {
if weightSum != 0 {
resiliency_score = 0
resiliencyScore = float64(totalTestResult) / float64(weightSum)
} else {
resiliency_score = (totalTestResult / weightSum)
}
execData = "{" + `"resiliency_score":` + `"` + strconv.Itoa(resiliency_score) + `",` + `"experiments_passed":` + `"` + strconv.Itoa(totalExperimentsPassed) + `",` + `"total_experiments":` + `"` + strconv.Itoa(totalExperiments) + `",` + execData[1:]
execData.ResiliencyScore = resiliencyScore
execData.ExperimentsPassed = totalExperimentsPassed
execData.TotalExperiments = totalExperiments
return execData
}
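Note: the visible lines reduce the new scoring path to a weighted average, resiliencyScore = totalTestResult / weightSum. A minimal standalone sketch of that arithmetic, assuming each experiment contributes its weight multiplied by its probe-success fraction (the accumulation loop itself is elided in the hunk above):

```go
package main

import "fmt"

// experimentResult is a hypothetical stand-in for the per-node chaos data
// consumed by ResiliencyScoreCalculator; field names are illustrative.
type experimentResult struct {
	Weight                 int     // configured weightage of the experiment
	ProbeSuccessPercentage float64 // 0-100, as reported by the chaos exporter
	Passed                 bool
}

// resiliencyScore mirrors the visible formula, totalTestResult / weightSum,
// kept as float64 so partially successful runs yield a fractional score.
func resiliencyScore(results []experimentResult) (score float64, passed, total int) {
	var weightSum, totalTestResult float64
	for _, r := range results {
		weightSum += float64(r.Weight)
		// assumption: each experiment contributes weight * success fraction
		totalTestResult += float64(r.Weight) * r.ProbeSuccessPercentage / 100
		if r.Passed {
			passed++
		}
	}
	total = len(results)
	if weightSum != 0 { // same guard as the diff: avoid dividing by zero
		score = totalTestResult / weightSum
	}
	return score, passed, total
}

func main() {
	score, passed, total := resiliencyScore([]experimentResult{
		{Weight: 10, ProbeSuccessPercentage: 100, Passed: true},
		{Weight: 5, ProbeSuccessPercentage: 40, Passed: false},
	})
	fmt.Printf("score=%.2f passed=%d/%d\n", score, passed, total) // score=0.80 passed=1/2
}
```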

View File

@ -0,0 +1,49 @@
package chaos_workflow
import chaosTypes "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1"
type ExecutionData struct {
WorkflowType string `json:"workflow_type"`
WorkflowID string `json:"-"`
EventType string `json:"event_type"`
UID string `json:"-"`
Namespace string `json:"namespace"`
Name string `json:"name"`
CreationTimestamp string `json:"creationTimestamp"`
Phase string `json:"phase"`
Message string `json:"message"`
StartedAt string `json:"startedAt"`
FinishedAt string `json:"finishedAt"`
Nodes map[string]Node `json:"nodes"`
ResiliencyScore float64 `json:"resiliency_score,string,omitempty"`
ExperimentsPassed int `json:"experiments_passed,string,omitempty"`
TotalExperiments int `json:"total_experiments,string,omitempty"`
}
// Node represents each node/step data
type Node struct {
Name string `json:"name"`
Phase string `json:"phase"`
Message string `json:"message"`
StartedAt string `json:"startedAt"`
FinishedAt string `json:"finishedAt"`
Children []string `json:"children"`
Type string `json:"type"`
ChaosExp *ChaosData `json:"chaosData,omitempty"`
}
// ChaosData is the data we get from chaos exporter
type ChaosData struct {
EngineUID string `json:"engineUID"`
EngineName string `json:"engineName"`
Namespace string `json:"namespace"`
ExperimentName string `json:"experimentName"`
ExperimentStatus string `json:"experimentStatus"`
LastUpdatedAt string `json:"lastUpdatedAt"`
ExperimentVerdict string `json:"experimentVerdict"`
ExperimentPod string `json:"experimentPod"`
RunnerPod string `json:"runnerPod"`
ProbeSuccessPercentage string `json:"probeSuccessPercentage"`
FailStep string `json:"failStep"`
ChaosResult *chaosTypes.ChaosResult `json:"chaosResult"`
}
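Note: since execution_data is still persisted as a JSON string, the new typed struct can be populated with a plain json.Unmarshal; the `,string` options let the numeric score fields round-trip as quoted strings. A small sketch with an illustrative payload (not a real event):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ExecutionData is trimmed here to the fields relevant to the sketch.
type ExecutionData struct {
	Phase             string  `json:"phase"`
	ResiliencyScore   float64 `json:"resiliency_score,string,omitempty"`
	ExperimentsPassed int     `json:"experiments_passed,string,omitempty"`
	TotalExperiments  int     `json:"total_experiments,string,omitempty"`
}

func main() {
	// Hypothetical payload; the numbers are quoted, matching the ",string" tags.
	raw := `{"phase":"Succeeded","resiliency_score":"0.8","experiments_passed":"1","total_experiments":"2"}`

	var execData ExecutionData
	if err := json.Unmarshal([]byte(raw), &execData); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", execData) // {Phase:Succeeded ResiliencyScore:0.8 ExperimentsPassed:1 TotalExperiments:2}

	// Marshalling back re-quotes the numeric fields the same way.
	out, _ := json.Marshal(execData)
	fmt.Println(string(out))
}
```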

View File

@ -20,29 +20,10 @@ type MongoOperator interface {
Replace(ctx context.Context, collectionType int, query bson.D, replacement interface{}) (*mongo.UpdateResult, error)
Delete(ctx context.Context, collectionType int, query bson.D, opts ...*options.DeleteOptions) (*mongo.DeleteResult, error)
CountDocuments(ctx context.Context, collectionType int, query bson.D, opts ...*options.CountOptions) (int64, error)
Aggregate(ctx context.Context, collectionType int, pipeline interface{}, opts ...*options.AggregateOptions) (*mongo.Cursor, error)
GetCollection(collectionType int) (*mongo.Collection, error)
}
type CollectionInterface interface {
InsertOne(ctx context.Context, document interface{},
opts ...*options.InsertOneOptions) (*mongo.InsertOneResult, error)
InsertMany(ctx context.Context, document interface{},
opts ...*options.InsertOneOptions) (*mongo.InsertOneResult, error)
FindOne(ctx context.Context, filter interface{},
opts ...*options.FindOneOptions) *mongo.SingleResult
Find(ctx context.Context, filter interface{},
opts ...*options.FindOptions) (*mongo.Cursor, error)
UpdateOne(ctx context.Context, filter interface{}, update interface{},
opts ...*options.UpdateOptions) (*mongo.UpdateResult, error)
UpdateMany(ctx context.Context, filter interface{}, update interface{},
opts ...*options.UpdateOptions) (*mongo.UpdateResult, error)
ReplaceOne(ctx context.Context, filter interface{},
replacement interface{}, opts ...*options.ReplaceOptions) (*mongo.UpdateResult, error)
DeleteOne(ctx context.Context, filter interface{},
opts ...*options.DeleteOptions) (*mongo.DeleteResult, error)
CountDocuments(ctx context.Context, filter interface{}, opts ...*options.CountOptions) (int64, error)
}
type MongoOperations struct{}
var (
@ -171,6 +152,18 @@ func (m *MongoOperations) CountDocuments(ctx context.Context, collectionType int
return result, nil
}
func (m *MongoOperations) Aggregate(ctx context.Context, collectionType int, pipeline interface{}, opts ...*options.AggregateOptions) (*mongo.Cursor, error) {
collection, err := m.GetCollection(collectionType)
if err != nil {
return nil, err
}
result, err := collection.Aggregate(ctx, pipeline, opts...)
if err != nil {
return nil, err
}
return result, nil
}
// GetCollection fetches the correct collection based on the collection type
func (m *MongoOperations) GetCollection(collectionType int) (*mongo.Collection, error) {
return GetCollectionClient.getCollection(collectionType)
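Note: with Aggregate exposed on the operator, any package can run a pipeline against a collection and decode the cursor itself. A hedged usage sketch (the $match filter, the decoded type, and the import path aliases are assumed from the packages shown in this PR, not taken verbatim from it):

```go
package example

import (
	"context"
	"time"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb"
	dbSchemaWorkflow "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb/workflow"
)

// listWorkflowsByProject runs a single $match stage through the new
// Aggregate method and decodes the cursor into workflow documents.
func listWorkflowsByProject(projectID string) ([]dbSchemaWorkflow.ChaosWorkFlowInput, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	pipeline := mongo.Pipeline{
		bson.D{{"$match", bson.D{{"project_id", projectID}}}},
	}

	cursor, err := mongodb.Operator.Aggregate(ctx, mongodb.WorkflowCollection, pipeline)
	if err != nil {
		return nil, err
	}

	var workflows []dbSchemaWorkflow.ChaosWorkFlowInput
	if err = cursor.All(ctx, &workflows); err != nil {
		return nil, err
	}
	return workflows, nil
}
```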

View File

@ -5,6 +5,8 @@ import (
"errors" "errors"
"time" "time"
"go.mongodb.org/mongo-driver/mongo"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb" "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
) )
@ -52,6 +54,10 @@ func UpdateWorkflowRun(workflowID string, wfRun ChaosWorkflowRun) (int, error) {
update := bson.D{
{"$set", bson.D{
{"workflow_runs.$.last_updated", wfRun.LastUpdated},
{"workflow_runs.$.phase", wfRun.Phase},
{"workflow_runs.$.resiliency_score", wfRun.ResiliencyScore},
{"workflow_runs.$.experiments_passed", wfRun.ExperimentsPassed},
{"workflow_runs.$.total_experiments", wfRun.TotalExperiments},
{"workflow_runs.$.execution_data", wfRun.ExecutionData}, {"workflow_runs.$.execution_data", wfRun.ExecutionData},
{"workflow_runs.$.completed", wfRun.Completed}, {"workflow_runs.$.completed", wfRun.Completed},
}}} }}}
@ -84,6 +90,18 @@ func GetWorkflows(query bson.D) ([]ChaosWorkFlowInput, error) {
return workflows, nil
}
// GetAggregateWorkflows takes a mongo pipeline to retrieve the workflow details from the database
func GetAggregateWorkflows(pipeline mongo.Pipeline) (*mongo.Cursor, error) {
ctx, _ := context.WithTimeout(backgroundContext, 10*time.Second)
results, err := mongodb.Operator.Aggregate(ctx, mongodb.WorkflowCollection, pipeline)
if err != nil {
return nil, err
}
return results, nil
}
// GetWorkflowsByClusterID takes a clusterID parameter to retrieve the workflow details from the database
func GetWorkflowsByClusterID(clusterID string) ([]ChaosWorkFlowInput, error) {
query := bson.D{{"cluster_id", clusterID}}
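Note: GetAggregateWorkflows only executes whatever pipeline the resolver composes, so filtering, sorting and pagination are expressed as caller-side stages. A hedged sketch of such a pipeline, unwinding the embedded runs and paging inside a $facet while counting the filtered total (field names follow the bson tags in this PR; the sort key, page/limit handling, and package alias are illustrative):

```go
package example

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	dbOperationsWorkflow "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb/workflow"
)

// queryWorkflowRunsPage pages through workflow runs entirely in MongoDB:
// $match narrows to the project, $unwind flattens workflow_runs, and $facet
// returns the filtered count alongside one sorted page of runs.
func queryWorkflowRunsPage(projectID string, page, limit int) ([]dbOperationsWorkflow.AggregatedWorkflowRuns, error) {
	pipeline := mongo.Pipeline{
		bson.D{{"$match", bson.D{{"project_id", projectID}}}},
		bson.D{{"$unwind", "$workflow_runs"}},
		bson.D{{"$facet", bson.D{
			{"total_filtered_workflow_runs", bson.A{
				bson.D{{"$count", "count"}},
			}},
			{"flattened_workflow_runs", bson.A{
				bson.D{{"$sort", bson.D{{"workflow_runs.last_updated", -1}}}},
				bson.D{{"$skip", page * limit}},
				bson.D{{"$limit", limit}},
			}},
		}}},
	}

	cursor, err := dbOperationsWorkflow.GetAggregateWorkflows(pipeline)
	if err != nil {
		return nil, err
	}

	var aggregated []dbOperationsWorkflow.AggregatedWorkflowRuns
	if err = cursor.All(context.Background(), &aggregated); err != nil {
		return nil, err
	}
	return aggregated, nil
}
```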

View File

@ -21,20 +21,53 @@ type ChaosWorkFlowInput struct {
CreatedAt string `bson:"created_at"`
ProjectID string `bson:"project_id"`
ClusterID string `bson:"cluster_id"`
ClusterName string `bson:"cluster_name"`
ClusterType string `bson:"cluster_type"`
WorkflowRuns []*ChaosWorkflowRun `bson:"workflow_runs"`
IsRemoved bool `bson:"isRemoved"`
}
// ChaosWorkflowRun contains the required fields to be stored in the database for a workflow run
type ChaosWorkflowRun struct {
WorkflowRunID string `bson:"workflow_run_id"`
LastUpdated string `bson:"last_updated"`
ExecutionData string `bson:"execution_data"`
Completed bool `bson:"completed"`
}
// WeightagesInput contains the required fields to be stored in the database for a weightages input
type WeightagesInput struct {
ExperimentName string `bson:"experiment_name"`
Weightage int `bson:"weightage"`
}
// ChaosWorkflowRun contains the required fields to be stored in the database for a workflow run
type ChaosWorkflowRun struct {
WorkflowRunID string `bson:"workflow_run_id"`
LastUpdated string `bson:"last_updated"`
Phase string `bson:"phase"`
ResiliencyScore *float64 `bson:"resiliency_score,string,omitempty"`
ExperimentsPassed *int `bson:"experiments_passed,string,omitempty"`
TotalExperiments *int `bson:"total_experiments,string,omitempty"`
ExecutionData string `bson:"execution_data"`
Completed bool `bson:"completed"`
}
type AggregatedWorkflowRuns struct {
TotalFilteredWorkflowRuns []TotalFilteredWorkflowRuns `bson:"total_filtered_workflow_runs"`
FlattenedWorkflowRuns []FlattenedWorkflowRun `bson:"flattened_workflow_runs"`
}
type TotalFilteredWorkflowRuns struct {
Count int `bson:"count"`
}
type FlattenedWorkflowRun struct {
WorkflowID string `bson:"workflow_id"`
WorkflowManifest string `bson:"workflow_manifest"`
CronSyntax string `bson:"cronSyntax"`
WorkflowName string `bson:"workflow_name"`
WorkflowDescription string `bson:"workflow_description"`
Weightages []*WeightagesInput `bson:"weightages"`
IsCustomWorkflow bool `bson:"isCustomWorkflow"`
UpdatedAt string `bson:"updated_at"`
CreatedAt string `bson:"created_at"`
ProjectID string `bson:"project_id"`
ClusterID string `bson:"cluster_id"`
ClusterName string `bson:"cluster_name"`
ClusterType string `bson:"cluster_type"`
WorkflowRuns ChaosWorkflowRun `bson:"workflow_runs"`
IsRemoved bool `bson:"isRemoved"`
}
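Note: the pointer score fields on ChaosWorkflowRun are what let the API report null instead of 0 while a run is still in progress. A hedged sketch of how a run document might be assembled from the typed execution data (the helper name, phase strings, Completed condition, and import paths are assumptions for illustration, not taken from this PR):

```go
package example

import (
	"strconv"
	"time"

	chaosWorkflow "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow"
	dbSchemaWorkflow "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb/workflow"
)

// toWorkflowRun builds the database document for one run. The score fields are
// only set once the run has left the Running phase, so an in-flight run keeps
// them nil and the GraphQL layer can surface null resiliency scores.
func toWorkflowRun(runID, rawExecData string, exec chaosWorkflow.ExecutionData) dbSchemaWorkflow.ChaosWorkflowRun {
	run := dbSchemaWorkflow.ChaosWorkflowRun{
		WorkflowRunID: runID,
		LastUpdated:   strconv.FormatInt(time.Now().Unix(), 10),
		Phase:         exec.Phase,
		ExecutionData: rawExecData,
		// assumption: a run is considered complete once it reaches a terminal phase
		Completed: exec.Phase == "Succeeded" || exec.Phase == "Failed",
	}
	if exec.Phase != "Running" {
		score, passed, total := exec.ResiliencyScore, exec.ExperimentsPassed, exec.TotalExperiments
		run.ResiliencyScore = &score
		run.ExperimentsPassed = &passed
		run.TotalExperiments = &total
	}
	return run
}
```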