Enhancement: Moving pagination, sorting and filtering of workflow runs table to the backend (#2829)
* added pagination for QueryWorkflowRuns
* filtered workflow runs based on workflowRunIDs
* changed the API for getWorkflowRuns in the frontend
* added pagination to the frontend and refactored code to accommodate the changes
* added sorting and filtering
* moved sorting into the backend API call
* removed filtering from the frontend and used the backend APIs to filter data
* typed execution data in the backend and sent common metadata from execution data in workflow runs, reducing the data size in the frontend; sorting based on workflow run phase is done in the backend
* changed resiliency score to null for running workflows
* WIP: filtering and sorting done, pagination remaining
* completed pagination in the database
* reverted the ID -> String changes
* changed the sortStage
* added a condition to check for no workflows
* pagination bug fix (#1): bug fix trials, reverted local dev changes
* fixed the workflow subscription bugs; everything finally works
* removed comments from config
* resolved review comments: translations, formatting, and removing a binary file
* fixed some bugs and added ExecutionData to types.go
* ran go fmt on the project

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>
Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>
Co-authored-by: SarthakJain26 <sarthak@chaosnative.com>
parent c77e5d24fe
commit aa5fe68456
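Before the diff itself, a minimal sketch of how a component is expected to consume the new backend-driven API after this change. The query, variable shape, and field names mirror the diff below; the component, its import paths, and the chosen filter value are illustrative assumptions, not code from this commit.

import { useQuery } from '@apollo/client';
import React from 'react';
import { WORKFLOW_DETAILS } from '../graphql';
import { Workflow, WorkflowDataVars } from '../models/graphql/workflowData';

// Illustrative component: asks the backend for a single page of workflow runs,
// already sorted and filtered server-side.
const RunsSummary: React.FC<{ projectID: string }> = ({ projectID }) => {
  const { data } = useQuery<Workflow, WorkflowDataVars>(WORKFLOW_DETAILS, {
    variables: {
      workflowRunsInput: {
        project_id: projectID,
        pagination: { page: 0, limit: 10 },        // zero-based page index
        sort: { field: 'Time', descending: true }, // newest runs first
        filter: { workflow_status: 'Running' },    // phase filter applied in the backend
      },
    },
    fetchPolicy: 'cache-and-network',
  });

  // workflow_runs holds only the requested page; the total count drives the paginator.
  return <span>{data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0} runs</span>;
};

export default RunsSummary;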
@@ -416,6 +416,7 @@ chaosWorkflows:
   browseWorkflows:
     status: Status
     name: Name
+    dateFilterHelperText: Select a period
     targetAgent: Target Agent
     reliabilityDetails: Reliability Details
     experiments: Experiments
@@ -1,17 +1,39 @@
 import { gql } from '@apollo/client';
 
+export const WORKFLOW_DETAILS_WITH_EXEC_DATA = gql`
+  query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
+    getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
+      total_no_of_workflow_runs
+      workflow_runs {
+        workflow_id
+        workflow_name
+        workflow_run_id
+        cluster_name
+        last_updated
+        cluster_id
+        phase
+        execution_data
+        resiliency_score
+      }
+    }
+  }
+`;
+
 export const WORKFLOW_DETAILS = gql`
-  query workflowDetails($projectID: String!) {
-    getWorkFlowRuns(project_id: $projectID) {
-      workflow_id
-      workflow_name
-      workflow_run_id
-      execution_data
-      project_id
-      cluster_name
-      last_updated
-      cluster_type
-      cluster_id
-    }
-  }
+  query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
+    getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
+      total_no_of_workflow_runs
+      workflow_runs {
+        workflow_id
+        workflow_name
+        workflow_run_id
+        cluster_name
+        last_updated
+        phase
+        resiliency_score
+        experiments_passed
+        total_experiments
+      }
+    }
+  }
 `;

@@ -150,6 +172,14 @@ export const GET_CLUSTER_LENGTH = gql`
   }
 `;
 
+export const GET_CLUSTER_NAMES = gql`
+  query getClusters($project_id: String!) {
+    getCluster(project_id: $project_id) {
+      cluster_name
+    }
+  }
+`;
+
 export const ALL_USERS = gql`
   query allUsers {
     users {
@@ -1,16 +1,33 @@
 import { gql } from '@apollo/client';
 
+export const WORKFLOW_EVENTS_WITH_EXEC_DATA = gql`
+  subscription workflowEvents($projectID: String!) {
+    workflowEventListener(project_id: $projectID) {
+      workflow_id
+      workflow_name
+      workflow_run_id
+      cluster_name
+      last_updated
+      cluster_id
+      phase
+      execution_data
+      resiliency_score
+    }
+  }
+`;
+
 export const WORKFLOW_EVENTS = gql`
   subscription workflowEvents($projectID: String!) {
     workflowEventListener(project_id: $projectID) {
       workflow_id
       workflow_name
       workflow_run_id
-      execution_data
-      project_id
       cluster_name
       last_updated
-      cluster_id
+      phase
+      resiliency_score
+      experiments_passed
+      total_experiments
     }
   }
 `;
@@ -53,16 +53,67 @@ export interface WorkflowRun {
   workflow_run_id: string;
   cluster_type: string;
   cluster_id: string;
+  phase: string;
+  resiliency_score?: number;
+  experiments_passed?: number;
+  total_experiments?: number;
 }
 
+interface GetWorkflowRunsOutput {
+  total_no_of_workflow_runs: number;
+  workflow_runs: WorkflowRun[];
+}
+
 export interface Workflow {
-  getWorkFlowRuns: WorkflowRun[];
+  getWorkflowRuns: GetWorkflowRunsOutput;
 }
 
 export interface WorkflowSubscription {
   workflowEventListener: WorkflowRun;
 }
 
-export interface WorkflowDataVars {
+export interface WorkflowSubscriptionInput {
   projectID: string;
 }
+
+// Pagination
+export interface Pagination {
+  page: number;
+  limit: number;
+}
+
+// Sort
+export interface SortInput {
+  field: 'Name' | 'Time';
+  descending?: boolean;
+}
+
+// Filter
+interface DateRange {
+  start_date: string;
+  end_date?: string;
+}
+
+export type WorkflowStatus =
+  | 'All'
+  | 'Failed'
+  | 'Running'
+  | 'Succeeded'
+  | undefined;
+
+export interface WorkflowRunFilterInput {
+  workflow_name?: string;
+  cluster_name?: string;
+  workflow_status?: WorkflowStatus;
+  date_range?: DateRange;
+}
+
+export interface WorkflowDataVars {
+  workflowRunsInput: {
+    project_id: string;
+    workflow_run_ids?: string[];
+    pagination?: Pagination;
+    sort?: SortInput;
+    filter?: WorkflowRunFilterInput;
+  };
+}
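Taken together, the new input types compose as follows. A small sketch, not part of the commit, assuming the types are imported from './workflowData' and that date bounds travel as epoch-millisecond strings, which is what the dateChange handler in BrowseWorkflow further down produces; the workflow name and project ID are placeholders.

import {
  Pagination,
  SortInput,
  WorkflowDataVars,
  WorkflowRunFilterInput,
} from './workflowData';

const pagination: Pagination = { page: 2, limit: 25 };

// `descending` is optional, so omitting it requests ascending order.
const sort: SortInput = { field: 'Name' };

const filter: WorkflowRunFilterInput = {
  workflow_name: 'podtato-head',      // hypothetical workflow name
  cluster_name: 'All',
  workflow_status: 'Succeeded',
  date_range: {
    // epoch milliseconds serialized as strings, mirroring dateChange below
    start_date: new Date(2021, 0, 1).setHours(0, 0, 0).toString(),
    end_date: new Date(2021, 0, 31).setHours(23, 59, 59).toString(),
  },
};

const vars: WorkflowDataVars = {
  workflowRunsInput: {
    project_id: 'my-project-id',      // placeholder ID
    pagination,
    sort,
    filter,
  },
};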
@@ -4,15 +4,15 @@ import Tabs from '@material-ui/core/Tabs/Tabs';
 import React, { useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
-import { useLocation } from 'react-router-dom';
+import { useParams } from 'react-router-dom';
 import BackButton from '../../components/Button/BackButton';
 import Loader from '../../components/Loader';
 import { StyledTab, TabPanel } from '../../components/Tabs';
 import Scaffold from '../../containers/layouts/Scaffold';
 import {
   SCHEDULE_DETAILS,
-  WORKFLOW_DETAILS,
-  WORKFLOW_EVENTS,
+  WORKFLOW_DETAILS_WITH_EXEC_DATA,
+  WORKFLOW_EVENTS_WITH_EXEC_DATA,
 } from '../../graphql';
 import {
   ScheduleDataVars,

@@ -24,6 +24,7 @@ import {
   Workflow,
   WorkflowDataVars,
   WorkflowSubscription,
+  WorkflowSubscriptionInput,
 } from '../../models/graphql/workflowData';
 import useActions from '../../redux/actions';
 import * as NodeSelectionActions from '../../redux/actions/nodeSelection';

@@ -37,6 +38,10 @@ import WorkflowNodeInfo from '../../views/WorkflowDetails/WorkflowNodeInfo';
 import NodeTable from '../../views/WorkflowDetails/WorkflowTable';
 import useStyles from './styles';
 
+interface URLParams {
+  workflowRunId: string;
+}
+
 const WorkflowDetails: React.FC = () => {
   const theme = useTheme();
   const { t } = useTranslation();

@@ -62,19 +67,23 @@ const WorkflowDetails: React.FC = () => {
 
   const { pod_name } = useSelector((state: RootState) => state.selectedNode);
 
-  // Getting the workflow nome from the pathname
-  const { pathname } = useLocation();
-  const workflowRunId = pathname.split('/')[2];
+  const { workflowRunId }: URLParams = useParams();
 
   // Query to get workflows
   const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>(
-    WORKFLOW_DETAILS,
-    { variables: { projectID } }
+    WORKFLOW_DETAILS_WITH_EXEC_DATA,
+    {
+      variables: {
+        workflowRunsInput: {
+          project_id: projectID,
+          workflow_run_ids: [workflowRunId],
+        },
+      },
+      fetchPolicy: 'cache-and-network',
+    }
   );
 
-  const workflow = data?.getWorkFlowRuns.filter(
-    (w) => w.workflow_run_id === workflowRunId
-  )[0];
+  const workflow = data?.getWorkflowRuns.workflow_runs[0];
 
   // Apollo query to get the scheduled data
   const { data: SchedulesData, loading } = useQuery<

@@ -87,34 +96,32 @@ const WorkflowDetails: React.FC = () => {
 
   // Using subscription to get realtime data
   useEffect(() => {
-    if (
-      workflow?.execution_data &&
-      (JSON.parse(workflow?.execution_data) as ExecutionData).phase ===
-        'Running'
-    ) {
-      subscribeToMore<WorkflowSubscription>({
-        document: WORKFLOW_EVENTS,
+    if (workflow?.phase && workflow.phase === 'Running') {
+      subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
+        document: WORKFLOW_EVENTS_WITH_EXEC_DATA,
         variables: { projectID },
         updateQuery: (prev, { subscriptionData }) => {
-          if (!subscriptionData.data) return prev;
-          const modifiedWorkflows = prev.getWorkFlowRuns.slice();
+          if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
+            return prev;
+
+          const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
           const newWorkflow = subscriptionData.data.workflowEventListener;
 
-          // Updating the query data
-          let i = 0;
-          for (; i < modifiedWorkflows.length; i++) {
-            if (
-              modifiedWorkflows[i].workflow_run_id ===
-              newWorkflow.workflow_run_id
-            ) {
-              modifiedWorkflows[i] = newWorkflow;
-              break;
-            }
-          }
-          if (i === modifiedWorkflows.length)
-            modifiedWorkflows.unshift(newWorkflow);
+          // Update only the required workflowRun
+          if (
+            modifiedWorkflows[0].workflow_run_id === newWorkflow.workflow_run_id
+          )
+            modifiedWorkflows[0] = newWorkflow;
 
-          return { ...prev, getWorkFlowRuns: modifiedWorkflows };
+          const totalNoOfWorkflows =
+            prev.getWorkflowRuns.total_no_of_workflow_runs;
+
+          return {
+            getWorkflowRuns: {
+              total_no_of_workflow_runs: totalNoOfWorkflows,
+              workflow_runs: modifiedWorkflows,
+            },
+          };
         },
       });
     }

@@ -233,6 +240,7 @@ const WorkflowDetails: React.FC = () => {
                       data={
                         JSON.parse(workflow.execution_data) as ExecutionData
                       }
+                      resiliency_score={workflow.resiliency_score}
                     />
                   )}
                 </div>

@@ -245,6 +253,7 @@ const WorkflowDetails: React.FC = () => {
                 tab={2}
                 cluster_name={workflow.cluster_name}
                 data={JSON.parse(workflow.execution_data) as ExecutionData}
+                resiliency_score={workflow.resiliency_score}
               />
               {/* Table for all Node details */}
               <NodeTable
@@ -18,16 +18,16 @@ import React, { useState } from 'react';
 import { DateRangePicker } from 'react-date-range';
 import 'react-date-range/dist/styles.css'; // main css file
 import 'react-date-range/dist/theme/default.css'; // theme css file
-import { Workflow, WorkflowRun } from '../../../models/graphql/workflowData';
+import { Clusters } from '../../../models/graphql/clusterData';
+import { WorkflowStatus } from '../../../models/graphql/workflowData';
 import useStyles from './styles';
 
 interface HeaderSectionProps {
-  searchValue: string;
-  statusValue: string;
-  clusterValue: string;
+  searchValue?: string;
+  statusValue?: WorkflowStatus;
+  clusterValue?: string;
   isOpen: boolean;
-  data: Workflow | undefined;
-  getClusters: (wfdata: WorkflowRun[]) => string[];
+  clusterList?: Partial<Clusters>;
   isDateOpen: boolean;
   popAnchorEl: HTMLElement | null;
   displayDate: string;

@@ -62,10 +62,9 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
   statusValue,
   clusterValue,
   isOpen,
-  data,
   popAnchorEl,
   displayDate,
-  getClusters,
+  clusterList,
   changeSearch,
   changeStatus,
   changeCluster,

@@ -82,6 +81,7 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
       key: 'selection',
     },
   ]);
+
   return (
     <div>
       <div className={classes.headerSection}>

@@ -127,13 +127,11 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
             className={classes.selectText}
           >
             <MenuItem value="All">All</MenuItem>
-            {(data ? getClusters(data.getWorkFlowRuns) : []).map(
-              (cluster: string) => (
-                <MenuItem key={cluster} value={cluster}>
-                  {cluster}
-                </MenuItem>
-              )
-            )}
+            {clusterList?.getCluster?.map((cluster) => (
+              <MenuItem key={cluster.cluster_name} value={cluster.cluster_name}>
+                {cluster.cluster_name}
+              </MenuItem>
+            ))}
           </Select>
         </FormControl>
 
@@ -1,3 +1,4 @@
+import { useQuery } from '@apollo/client';
 import {
   Button,
   IconButton,

@@ -7,36 +8,31 @@ import {
   TableCell,
   Typography,
 } from '@material-ui/core';
-import { useQuery } from '@apollo/client';
+import ChevronRightIcon from '@material-ui/icons/ChevronRight';
+import KeyboardArrowDownIcon from '@material-ui/icons/KeyboardArrowDown';
 import MoreVertIcon from '@material-ui/icons/MoreVert';
 import React from 'react';
-import KeyboardArrowDownIcon from '@material-ui/icons/KeyboardArrowDown';
-import ChevronRightIcon from '@material-ui/icons/ChevronRight';
 import { useTranslation } from 'react-i18next';
-import {
-  ExecutionData,
-  WorkflowRun,
-} from '../../../models/graphql/workflowData';
-import { history } from '../../../redux/configureStore';
-import { getProjectID, getProjectRole } from '../../../utils/getSearchParams';
-import CustomStatus from '../CustomStatus/Status';
-import useStyles from './styles';
-import useActions from '../../../redux/actions';
-import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
-import TimePopOver from '../../../components/TimePopOver';
+import { WORKFLOW_LIST_DETAILS } from '../../../graphql';
+import { WorkflowRun } from '../../../models/graphql/workflowData';
+import {
+  WorkflowList,
+  WorkflowListDataVars,
+} from '../../../models/graphql/workflowListData';
+import useActions from '../../../redux/actions';
+import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
+import { history } from '../../../redux/configureStore';
+import { getProjectID, getProjectRole } from '../../../utils/getSearchParams';
 import ExperimentPoints from '../BrowseSchedule/ExperimentPoints';
+import TimePopOver from '../../../components/TimePopOver';
+import CustomStatus from '../CustomStatus/Status';
+import useStyles from './styles';
 
 interface TableDataProps {
-  data: WorkflowRun;
-  exeData: ExecutionData;
+  data: Partial<WorkflowRun>;
 }
 
-const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
+const TableData: React.FC<TableDataProps> = ({ data }) => {
   const classes = useStyles();
   const projectID = getProjectID();
   const projectRole = getProjectRole();

@@ -96,9 +92,7 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
   return (
     <>
       <TableCell className={classes.tableDataStatus}>
-        <CustomStatus
-          status={exeData.finishedAt.length === 0 ? 'Running' : exeData.phase}
-        />
+        <CustomStatus status={data.phase ?? ''} />
       </TableCell>
       <TableCell
         className={classes.workflowNameData}

@@ -119,23 +113,24 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
       </TableCell>
       <TableCell>
         <Typography className={classes.clusterName}>
-          {nameCapitalized(data.cluster_name)}
+          {nameCapitalized(data.cluster_name ?? '')}
         </Typography>
       </TableCell>
       <TableCell className={classes.reliabiltyData}>
         <Typography>
           <span>{t('chaosWorkflows.browseWorkflows.tableData.overallRR')}</span>
-          {!exeData.resiliency_score ? (
+          {data.resiliency_score === undefined ||
+          data.resiliency_score === null ? (
             <span className={classes.less}>
               {t('chaosWorkflows.browseWorkflows.tableData.na')}
             </span>
           ) : (
             <span
               className={`${classes.boldText} ${getResiliencyScoreColor(
-                exeData.resiliency_score
+                data.resiliency_score
               )}`}
             >
-              {exeData.resiliency_score}%
+              {data.resiliency_score}%
             </span>
           )}
         </Typography>

@@ -143,17 +138,22 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
           <span>
             {t('chaosWorkflows.browseWorkflows.tableData.experimentsPassed')}
           </span>
-          {!exeData.resiliency_score ? (
+          {data.experiments_passed === undefined ||
+          data.experiments_passed === null ||
+          data.total_experiments === undefined ||
+          data.total_experiments === null ||
+          data.resiliency_score === undefined ||
+          data.resiliency_score === null ? (
            <span className={classes.less}>
              {t('chaosWorkflows.browseWorkflows.tableData.na')}
            </span>
          ) : (
            <span
              className={`${classes.boldText} ${getResiliencyScoreColor(
-                exeData.resiliency_score
+                data.resiliency_score
              )}`}
            >
-              {exeData.experiments_passed}/{exeData.total_experiments}
+              {data.experiments_passed}/{data.total_experiments}
            </span>
          )}
        </Typography>

@@ -209,7 +209,7 @@ const TableData: React.FC<TableDataProps> = ({ data, exeData }) => {
         </div>
       </TableCell>
       <TableCell>
-        <TimePopOver unixTime={data.last_updated} />
+        <TimePopOver unixTime={data.last_updated ?? ''} />
       </TableCell>
       <TableCell>
         <IconButton
@@ -16,74 +16,134 @@ import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
 import moment from 'moment';
 import React, { useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
-import { WORKFLOW_DETAILS, WORKFLOW_EVENTS } from '../../../graphql';
 import {
-  ExecutionData,
+  GET_CLUSTER_NAMES,
+  WORKFLOW_DETAILS,
+  WORKFLOW_EVENTS,
+} from '../../../graphql';
+import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
+import {
+  Pagination,
+  SortInput,
   Workflow,
   WorkflowDataVars,
   WorkflowRun,
+  WorkflowRunFilterInput,
+  WorkflowStatus,
   WorkflowSubscription,
+  WorkflowSubscriptionInput,
 } from '../../../models/graphql/workflowData';
 import { getProjectID } from '../../../utils/getSearchParams';
-import {
-  sortAlphaAsc,
-  sortAlphaDesc,
-  sortNumAsc,
-  sortNumDesc,
-} from '../../../utils/sort';
 import HeaderSection from './HeaderSection';
 import useStyles from './styles';
 import TableData from './TableData';
 
-interface FilterOptions {
-  search: string;
-  status: string;
-  cluster: string;
-}
-
-interface PaginationData {
-  pageNo: number;
-  rowsPerPage: number;
-}
-
-interface SortData {
-  lastRun: { sort: boolean; ascending: boolean };
-  name: { sort: boolean; ascending: boolean };
-  noOfSteps: { sort: boolean; ascending: boolean };
-}
-
-interface DateData {
-  dateValue: string;
-  fromDate: string;
-  toDate: string;
-}
-
 const BrowseWorkflow: React.FC = () => {
   const classes = useStyles();
   const projectID = getProjectID();
   const { t } = useTranslation();
 
+  // State for pagination
+  const [paginationData, setPaginationData] = useState<Pagination>({
+    page: 0,
+    limit: 10,
+  });
+
+  // States for filters
+  const [filters, setFilters] = useState<WorkflowRunFilterInput>({
+    workflow_name: '',
+    cluster_name: 'All',
+    workflow_status: 'All',
+    date_range: {
+      start_date: new Date(0).valueOf().toString(),
+    },
+  });
+
+  // State for date to be displayed
+  const [displayDate, setDisplayDate] = React.useState<string>(
+    t('chaosWorkflows.browseWorkflows.dateFilterHelperText')
+  );
+
+  // State for sorting
+  const [sortData, setSortData] = useState<SortInput>({
+    field: 'Time',
+    descending: true,
+  });
+
+  // Checks if the workflow event from subscription exists in the table
+  function isFiltered(newWorkflow: WorkflowRun) {
+    const nameExists =
+      filters.workflow_name &&
+      newWorkflow.workflow_name
+        .toLowerCase()
+        .includes(filters.workflow_name.toLowerCase());
+
+    const clusterExists =
+      filters.cluster_name === 'All' ||
+      filters.cluster_name === newWorkflow.cluster_name;
+
+    const phaseExists =
+      filters.workflow_status === 'All' ||
+      filters.workflow_status === newWorkflow.phase;
+
+    const dateExists =
+      filters.date_range &&
+      newWorkflow.last_updated >= filters.date_range.start_date &&
+      (filters.date_range.end_date
+        ? newWorkflow.last_updated < filters.date_range.end_date
+        : true);
+
+    const shouldAddNewWorkflow =
+      nameExists && clusterExists && phaseExists && dateExists;
+
+    return shouldAddNewWorkflow;
+  }
+
+  // Query to get list of Clusters
+  const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
+    GET_CLUSTER_NAMES,
+    {
+      variables: {
+        project_id: projectID,
+      },
+    }
+  );
+
   // Query to get workflows
   const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>(
     WORKFLOW_DETAILS,
     {
-      variables: { projectID },
+      variables: {
+        workflowRunsInput: {
+          project_id: projectID,
+          pagination: {
+            page: paginationData.page,
+            limit: paginationData.limit,
+          },
+          sort: sortData,
+          filter: filters,
+        },
+      },
+      fetchPolicy: 'cache-and-network',
     }
   );
 
   // Using subscription to get realtime data
   useEffect(() => {
-    subscribeToMore<WorkflowSubscription>({
+    subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
       document: WORKFLOW_EVENTS,
       variables: { projectID },
       updateQuery: (prev, { subscriptionData }) => {
-        if (!subscriptionData.data) return prev;
-        const modifiedWorkflows = prev.getWorkFlowRuns.slice();
+        if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
+          return prev;
+
+        const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
         const newWorkflow = subscriptionData.data.workflowEventListener;
 
         // Updating the query data
         let i = 0;
+        let totalNoOfWorkflows = prev.getWorkflowRuns.total_no_of_workflow_runs;
+
         for (; i < modifiedWorkflows.length; i++) {
           if (
             modifiedWorkflows[i].workflow_run_id === newWorkflow.workflow_run_id

@@ -92,34 +152,21 @@ const BrowseWorkflow: React.FC = () => {
             break;
           }
         }
-        if (i === modifiedWorkflows.length)
+        if (i === modifiedWorkflows.length && isFiltered(newWorkflow)) {
+          totalNoOfWorkflows++;
           modifiedWorkflows.unshift(newWorkflow);
+        }
 
-        return { ...prev, getWorkFlowRuns: modifiedWorkflows };
+        return {
+          getWorkflowRuns: {
+            total_no_of_workflow_runs: totalNoOfWorkflows,
+            workflow_runs: modifiedWorkflows,
+          },
+        };
       },
     });
   }, [data]);
 
-  // States for filters
-  const [filters, setFilters] = useState<FilterOptions>({
-    search: '',
-    status: 'All',
-    cluster: 'All',
-  });
-
-  // State for sorting
-  const [sortData, setSortData] = useState<SortData>({
-    lastRun: { sort: true, ascending: true },
-    name: { sort: false, ascending: true },
-    noOfSteps: { sort: false, ascending: false },
-  });
-
-  // State for pagination
-  const [paginationData, setPaginationData] = useState<PaginationData>({
-    pageNo: 0,
-    rowsPerPage: 5,
-  });
-
   const [popAnchorEl, setPopAnchorEl] = React.useState<null | HTMLElement>(
     null
   );

@@ -136,96 +183,14 @@ const BrowseWorkflow: React.FC = () => {
     setOpen(true);
   };
 
-  // State for start date and end date
-  const [dateRange, setDateRange] = React.useState<DateData>({
-    dateValue: 'Select a period',
-    fromDate: new Date(0).toString(),
-    toDate: new Date(new Date().setHours(23, 59, 59)).toString(),
-  });
+  const workflowRuns = data?.getWorkflowRuns.workflow_runs;
 
-  const getClusters = (searchingData: WorkflowRun[]) => {
-    const uniqueList: string[] = [];
-    searchingData.forEach((data) => {
-      if (!uniqueList.includes(data.cluster_name)) {
-        uniqueList.push(data.cluster_name);
-      }
-    });
-    return uniqueList;
-  };
-
-  const filteredData = data?.getWorkFlowRuns
-    .filter((dataRow) =>
-      dataRow.workflow_name.toLowerCase().includes(filters.search.toLowerCase())
-    )
-    .filter((dataRow) =>
-      filters.status === 'All'
-        ? true
-        : (JSON.parse(dataRow.execution_data) as ExecutionData).phase.includes(
-            filters.status
-          )
-    )
-    .filter((dataRow) =>
-      filters.cluster === 'All'
-        ? true
-        : dataRow.cluster_name
-            .toLowerCase()
-            .includes(filters.cluster.toLowerCase())
-    )
-    .filter((dataRow) => {
-      return dateRange.fromDate && dateRange.toDate === undefined
-        ? true
-        : parseInt(dataRow.last_updated, 10) * 1000 >=
-            new Date(moment(dateRange.fromDate).format()).getTime() &&
-            parseInt(dataRow.last_updated, 10) * 1000 <=
-              new Date(moment(dateRange.toDate).format()).getTime();
-    })
-    .sort((a: WorkflowRun, b: WorkflowRun) => {
-      // Sorting based on unique fields
-      if (sortData.name.sort) {
-        const x = a.workflow_name;
-        const y = b.workflow_name;
-
-        return sortData.name.ascending
-          ? sortAlphaAsc(x, y)
-          : sortAlphaDesc(x, y);
-      }
-
-      if (sortData.lastRun.sort) {
-        const x = parseInt(a.last_updated, 10);
-        const y = parseInt(b.last_updated, 10);
-
-        return sortData.lastRun.ascending
-          ? sortNumAsc(y, x)
-          : sortNumDesc(y, x);
-      }
-
-      return 0;
-    })
-    .sort((a: WorkflowRun, b: WorkflowRun) => {
-      // Sorting based on non-unique fields
-      if (sortData.noOfSteps.sort) {
-        const x = Object.keys(
-          (JSON.parse(a.execution_data) as ExecutionData).nodes
-        ).length;
-
-        const y = Object.keys(
-          (JSON.parse(b.execution_data) as ExecutionData).nodes
-        ).length;
-
-        return sortData.noOfSteps.ascending
-          ? sortNumAsc(x, y)
-          : sortNumDesc(x, y);
-      }
-
-      return 0;
-    });
-
-  // Functions passed as props in the headerSeaction
+  // Functions passed as props in the headerSection
   const changeSearch = (
     event: React.ChangeEvent<HTMLTextAreaElement | HTMLInputElement>
   ) => {
-    setFilters({ ...filters, search: event.target.value as string });
-    setPaginationData({ ...paginationData, pageNo: 0 });
+    setFilters({ ...filters, workflow_name: event.target.value as string });
+    setPaginationData({ ...paginationData, page: 0 });
   };
 
   const changeStatus = (

@@ -234,8 +199,11 @@ const BrowseWorkflow: React.FC = () => {
       value: unknown;
     }>
   ) => {
-    setFilters({ ...filters, status: event.target.value as string });
-    setPaginationData({ ...paginationData, pageNo: 0 });
+    setFilters({
+      ...filters,
+      workflow_status: event.target.value as WorkflowStatus,
+    });
+    setPaginationData({ ...paginationData, page: 0 });
   };
 
   const changeCluster = (

@@ -244,33 +212,34 @@ const BrowseWorkflow: React.FC = () => {
       value: unknown;
     }>
   ) => {
-    setFilters({ ...filters, cluster: event.target.value as string });
-    setPaginationData({ ...paginationData, pageNo: 0 });
+    setFilters({ ...filters, cluster_name: event.target.value as string });
+    setPaginationData({ ...paginationData, page: 0 });
   };
 
   // Function to set the date range for filtering
-  const dateChange = (selectFromDate: string, selectToDate: string) => {
-    setDateRange({
-      dateValue: `${moment(selectFromDate)
-        .format('DD.MM.YYYY')
-        .toString()}-${moment(selectToDate).format('DD.MM.YYYY').toString()}`,
-      fromDate: new Date(new Date(selectFromDate).setHours(0, 0, 0)).toString(),
-      toDate: new Date(new Date(selectToDate).setHours(23, 59, 59)).toString(),
+  const dateChange = (selectStartDate: string, selectEndDate: string) => {
+    // Change filter value for date range
+    setFilters({
+      ...filters,
+      date_range: {
+        start_date: new Date(selectStartDate)
+          .setHours(0, 0, 0)
+          .valueOf()
+          .toString(),
+        end_date: new Date(selectEndDate)
+          .setHours(23, 59, 59)
+          .valueOf()
+          .toString(),
+      },
     });
-  };
-  // Function to validate execution_data JSON
-  const dataPerRow = (dataRow: WorkflowRun) => {
-    let exe_data;
-    try {
-      exe_data = JSON.parse(dataRow.execution_data);
-    } catch (error) {
-      console.error(error);
-      return <></>;
-    }
-    return (
-      <TableRow data-cy="WorkflowRunsTableRow" key={dataRow.workflow_run_id}>
-        <TableData data={dataRow} exeData={exe_data} />
-      </TableRow>
-    );
+
+    // Change the display value of date range
+    setDisplayDate(
+      `${moment(selectStartDate).format('DD.MM.YYYY').toString()}-${moment(
+        selectEndDate
+      )
+        .format('DD.MM.YYYY')
+        .toString()}`
+    );
   };
 

@@ -279,20 +248,19 @@ const BrowseWorkflow: React.FC = () => {
       <section className="Heading section">
         {/* Header Section */}
         <HeaderSection
-          searchValue={filters.search}
+          searchValue={filters.workflow_name}
           changeSearch={changeSearch}
-          statusValue={filters.status}
+          statusValue={filters.workflow_status}
           changeStatus={changeStatus}
-          clusterValue={filters.cluster}
+          clusterValue={filters.cluster_name}
           changeCluster={changeCluster}
           popOverClick={handlePopOverClick}
           popOverClose={handlePopOverClose}
           isOpen={isOpen}
-          data={data}
-          getClusters={getClusters}
+          clusterList={clusterList}
           popAnchorEl={popAnchorEl}
           isDateOpen={open}
-          displayDate={dateRange.dateValue}
+          displayDate={displayDate}
           selectDate={dateChange}
         />
       </section>

@@ -321,9 +289,7 @@ const BrowseWorkflow: React.FC = () => {
                       size="small"
                       onClick={() =>
                         setSortData({
-                          ...sortData,
-                          name: { sort: true, ascending: true },
-                          lastRun: { sort: false, ascending: true },
+                          field: 'Name',
                         })
                       }
                     >

@@ -334,9 +300,8 @@ const BrowseWorkflow: React.FC = () => {
                       size="small"
                       onClick={() =>
                         setSortData({
-                          ...sortData,
-                          name: { sort: true, ascending: false },
-                          lastRun: { sort: false, ascending: false },
+                          field: 'Name',
+                          descending: true,
                         })
                       }
                     >

@@ -379,9 +344,8 @@ const BrowseWorkflow: React.FC = () => {
                       size="small"
                       onClick={() =>
                         setSortData({
-                          ...sortData,
-                          lastRun: { sort: true, ascending: true },
-                          name: { sort: false, ascending: true },
+                          field: 'Time',
+                          descending: true,
                         })
                       }
                     >

@@ -392,9 +356,7 @@ const BrowseWorkflow: React.FC = () => {
                       size="small"
                       onClick={() =>
                         setSortData({
-                          ...sortData,
-                          lastRun: { sort: true, ascending: false },
-                          name: { sort: false, ascending: true },
+                          field: 'Time',
                         })
                       }
                     >

@@ -419,14 +381,15 @@ const BrowseWorkflow: React.FC = () => {
                     </Typography>
                   </TableCell>
                 </TableRow>
-              ) : filteredData && filteredData.length ? (
-                filteredData
-                  .slice(
-                    paginationData.pageNo * paginationData.rowsPerPage,
-                    paginationData.pageNo * paginationData.rowsPerPage +
-                      paginationData.rowsPerPage
-                  )
-                  .map((dataRow) => dataPerRow(dataRow))
+              ) : workflowRuns && workflowRuns.length ? (
+                workflowRuns.map((dataRow) => (
+                  <TableRow
+                    data-cy="WorkflowRunsTableRow"
+                    key={dataRow.workflow_run_id}
+                  >
+                    <TableData data={dataRow} />
+                  </TableRow>
+                ))
              ) : (
                <TableRow>
                  <TableCell colSpan={7}>

@@ -442,19 +405,19 @@ const BrowseWorkflow: React.FC = () => {
 
           {/* Pagination */}
           <TablePagination
-            rowsPerPageOptions={[5, 10, 25]}
+            rowsPerPageOptions={[10, 25, 50]}
             component="div"
-            count={filteredData?.length ?? 0}
-            rowsPerPage={paginationData.rowsPerPage}
-            page={paginationData.pageNo}
+            count={data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0}
+            rowsPerPage={paginationData.limit}
+            page={paginationData.page}
             onChangePage={(_, page) =>
-              setPaginationData({ ...paginationData, pageNo: page })
+              setPaginationData({ ...paginationData, page })
             }
             onChangeRowsPerPage={(event) =>
               setPaginationData({
                 ...paginationData,
-                pageNo: 0,
-                rowsPerPage: parseInt(event.target.value, 10),
+                page: 0,
+                limit: parseInt(event.target.value, 10),
               })
             }
           />
@@ -19,7 +19,7 @@ import {
 import useStyles from './styles';
 
 interface WorkflowRunCardProps {
-  data: WorkflowRun;
+  data: Partial<WorkflowRun>;
 }
 
 const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {

@@ -31,7 +31,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
 
   const nodeSelection = useActions(NodeSelectionActions);
 
-  function getPhaseVariant(variant: string): string {
+  function getPhaseVariant(variant: string | undefined): string {
     switch (variant) {
       case SUCCEEDED:
         return classes.succeeded;

@@ -56,8 +56,6 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
     return classes.highScore;
   }
 
-  const executionData = JSON.parse(data.execution_data);
-
   return (
     <Link
       underline="none"

@@ -80,7 +78,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
         <div>
           <div className={classes.statusDiv}>
             <svg viewBox="0 0 10 10">
-              <circle className={getPhaseVariant(executionData.phase)} />
+              <circle className={getPhaseVariant(data.phase)} />
             </svg>
             <div>
               <Typography

@@ -102,13 +100,12 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
               )}
             </Typography>
             <Typography
-              className={getResiliencyScoreVariant(
-                executionData.resiliency_score
-              )}
+              className={getResiliencyScoreVariant(data.resiliency_score ?? 0)}
             >
-              {executionData.resiliency_score
-                ? `${executionData.resiliency_score}%`
-                : '--'}
+              {data.resiliency_score === undefined ||
+              data.resiliency_score === null
+                ? 'NA'
+                : `${data.resiliency_score}%`}
             </Typography>
           </div>
 
@@ -3,7 +3,7 @@ import { ButtonFilled } from 'litmus-ui';
 import React from 'react';
 import { useTranslation } from 'react-i18next';
 import { Link } from 'react-router-dom';
-import { Workflow } from '../../../../models/graphql/workflowData';
+import { WorkflowRun } from '../../../../models/graphql/workflowData';
 import { history } from '../../../../redux/configureStore';
 import {
   getProjectID,

@@ -13,7 +13,7 @@ import useStyles from './styles';
 import { WorkflowRunCard } from './WorkflowRunCard';
 
 interface RecentWorkflowRunsProps {
-  data: Workflow;
+  data: Partial<WorkflowRun>[];
 }
 
 const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {

@@ -23,8 +23,6 @@ const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {
   const projectID = getProjectID();
   const projectRole = getProjectRole();
 
-  const filteredData = data.getWorkFlowRuns.slice(-3).reverse();
-
   return (
     <Paper className={classes.workflowRunContainer}>
       {/* Heading section of the container */}

@@ -59,7 +57,7 @@ const RecentWorkflowRuns: React.FC<RecentWorkflowRunsProps> = ({ data }) => {
 
       {/* WorkflowRuns Data */}
 
-      {filteredData.map((workflow) => {
+      {data.map((workflow) => {
         return <WorkflowRunCard key={workflow.workflow_id} data={workflow} />;
       })}
     </Paper>
@@ -35,12 +35,20 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
   const { data, loading, error } = useQuery<Workflow, WorkflowDataVars>(
     WORKFLOW_DETAILS,
     {
-      variables: { projectID },
+      variables: {
+        workflowRunsInput: {
+          project_id: projectID,
+          pagination: {
+            page: 0,
+            limit: 3,
+          },
+        },
+      },
+      fetchPolicy: 'cache-and-network',
     }
   );
 
-  let workflowRunCount = 0;
+  const workflowRunCount = data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0;
 
   if (error) {
     console.error('Error fetching Workflow Data');

@@ -51,24 +59,14 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
     );
   }
 
-  if (data) {
-    workflowRunCount = data.getWorkFlowRuns.length;
-  } else {
-    return (
-      <Center>
-        <Loader />
-      </Center>
-    );
-  }
-
   return (
     <div>
       {loading ? (
         <Center>
           <Loader />
         </Center>
-      ) : workflowRunCount > 0 ? (
-        <RecentWorkflowRuns data={data} />
+      ) : data && workflowRunCount > 0 ? (
+        <RecentWorkflowRuns data={data.getWorkflowRuns.workflow_runs} />
       ) : (
         <MainInfoContainer
           src="./icons/workflowScheduleHome.svg"
@@ -6,13 +6,17 @@ import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import YAML from 'yaml';
 import { StyledTab, TabPanel } from '../../../components/Tabs';
-import { WORKFLOW_DETAILS, WORKFLOW_LOGS } from '../../../graphql';
+import {
+  WORKFLOW_DETAILS_WITH_EXEC_DATA,
+  WORKFLOW_LOGS,
+} from '../../../graphql';
 import {
   PodLog,
   PodLogRequest,
   PodLogVars,
 } from '../../../models/graphql/podLog';
 import {
+  ExecutionData,
   Workflow,
   WorkflowDataVars,
 } from '../../../models/graphql/workflowData';

@@ -47,13 +51,18 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
   const projectID = getProjectID();
 
   const { data: workflow_data } = useQuery<Workflow, WorkflowDataVars>(
-    WORKFLOW_DETAILS,
-    { variables: { projectID } }
+    WORKFLOW_DETAILS_WITH_EXEC_DATA,
+    {
+      variables: {
+        workflowRunsInput: {
+          project_id: projectID,
+          workflow_run_ids: [workflow_run_id],
+        },
+      },
+    }
   );
 
-  const workflow = workflow_data?.getWorkFlowRuns.filter(
-    (w) => w.workflow_run_id === workflow_run_id
-  )[0];
+  const workflow = workflow_data?.getWorkflowRuns.workflow_runs[0];
 
   const [chaosData, setChaosData] = useState<ChaosDataVar>({
     exp_pod: '',

@@ -63,7 +72,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
 
   useEffect(() => {
     if (workflow !== undefined) {
-      const nodeData = JSON.parse(workflow.execution_data).nodes[pod_name];
+      const nodeData = (JSON.parse(workflow.execution_data) as ExecutionData)
+        .nodes[pod_name];
       if (nodeData && nodeData.chaosData)
         setChaosData({
           exp_pod: nodeData.chaosData.experimentPod,

@@ -83,7 +93,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
 
   useEffect(() => {
     if (workflow !== undefined) {
-      const nodeData = JSON.parse(workflow.execution_data).nodes[pod_name];
+      const nodeData = (JSON.parse(workflow.execution_data) as ExecutionData)
+        .nodes[pod_name];
       if (nodeData?.chaosData?.chaosResult) {
         setChaosResult(YAML.stringify(nodeData.chaosData?.chaosResult));
       } else {

@@ -118,8 +129,8 @@ const LogsSwitcher: React.FC<LogsSwitcherProps> = ({
     }
     if (
       workflow !== undefined &&
-      JSON.parse(workflow?.execution_data).nodes[pod_name].type ===
-        'ChaosEngine'
+      (JSON.parse(workflow.execution_data) as ExecutionData).nodes[pod_name]
+        .type === 'ChaosEngine'
     ) {
       return t('workflowDetailsView.nodeLogs.chaosLogs');
     }
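The casts above assume execution_data always parses cleanly. A defensive variant, sketched here as an assumption rather than something this commit adds, could funnel every parse through a single guard:

import { ExecutionData } from '../../../models/graphql/workflowData';

// Hypothetical helper: returns undefined instead of throwing when the
// serialized execution data is missing or malformed.
export function parseExecutionData(raw?: string): ExecutionData | undefined {
  if (!raw) return undefined;
  try {
    return JSON.parse(raw) as ExecutionData;
  } catch (err) {
    console.error('Unable to parse execution_data:', err);
    return undefined;
  }
}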
@@ -1,15 +1,16 @@
 import { Typography } from '@material-ui/core';
+import { ButtonOutlined } from 'litmus-ui';
 import React from 'react';
 import { useTranslation } from 'react-i18next';
-import { ButtonOutlined } from 'litmus-ui';
+import TimePopOver from '../../../components/TimePopOver';
 import { ExecutionData } from '../../../models/graphql/workflowData';
 import useStyles from './styles';
-import TimePopOver from '../../../components/TimePopOver';
 
 interface WorkflowInfoProps {
   setIsInfoToggled?: React.Dispatch<React.SetStateAction<boolean>>;
   tab: number;
   data: ExecutionData;
+  resiliency_score?: number;
   cluster_name: string;
 }
 

@@ -17,6 +18,7 @@ const WorkflowInfo: React.FC<WorkflowInfoProps> = ({
   setIsInfoToggled,
   tab,
   data,
+  resiliency_score,
   cluster_name,
 }) => {
   const classes = useStyles();

@@ -55,10 +57,10 @@ const WorkflowInfo: React.FC<WorkflowInfoProps> = ({
           {t('workflowDetailsView.workflowInfo.resilienceScore')}
         </Typography>
-        {/* Static data, will be changed with API response */}
-        <Typography className={classes.resilliencyScore}>
-          {data.resiliency_score === undefined
+        <Typography className={classes.resiliencyScore}>
+          {resiliency_score === undefined || resiliency_score === null
             ? 'NA'
-            : `${data.resiliency_score}%`}
+            : `${resiliency_score}%`}
         </Typography>
       </div>
 
@@ -55,7 +55,7 @@ const useStyles = makeStyles((theme) => ({
     fontSize: '1rem',
   },
 
-  resilliencyScore: {
+  resiliencyScore: {
     color: theme.palette.highlight,
     fontSize: '1.5rem',
   },
@@ -76,7 +76,6 @@ github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4Rq
 github.com/agnivade/levenshtein v1.0.3 h1:M5ZnqLOoZR8ygVq0FfkXsNOKzMCk0xRiow0R5+5VkQ0=
 github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs=
 github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
 github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
 github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=

@@ -84,16 +83,13 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
 github.com/aliyun/aliyun-oss-go-sdk v2.0.6+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
 github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
 github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
 github.com/ant31/crd-validation v0.0.0-20180702145049-30f8a35d0ac2/go.mod h1:X0noFIik9YqfhGYBLEHg8LJKEwy7QIitLQuFMpKLcPk=
 github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q=
 github.com/antonmedv/expr v1.8.2/go.mod h1:5qsM3oLGDND7sDmQGDXHkYfkjYMUX14qsgqmHhwGEk8=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
 github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
 github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3 h1:UbCWw+VjeyicEGnFvBIGzOYCKuCqrRUzlxSbzaHcXug=
 github.com/argoproj/argo v0.0.0-20200806220847-5759a0e198d3/go.mod h1:M0Up9o5uqIZvRh/vh8eJR27s6H+UlkiS1PBUQAIq4Hw=

@@ -102,7 +98,6 @@ github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hC
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
 github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
 github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
 github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
 github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
 github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=

@@ -119,7 +114,6 @@ github.com/bazelbuild/bazel-gazelle v0.0.0-20181012220611-c728ce9f663e/go.mod h1
 github.com/bazelbuild/buildtools v0.0.0-20180226164855-80c7f0d45d7e/go.mod h1:5JP0TXzWDHXv8qvxRC4InIazwdyDseBDbzESUMKk1yU=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bifurcation/mint v0.0.0-20180715133206-93c51c6ce115/go.mod h1:zVt7zX3K/aDCk9Tj+VM7YymsX66ERvzCJzw8rFCX2JU=

@@ -140,10 +134,8 @@ github.com/cenkalti/backoff v2.1.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH
 github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/prettybench v0.0.0-20150116022406-03b8cfe5406c/go.mod h1:Xe6ZsFhtM8HrDku0pxJ3/Lr51rwykrzgFwpmTzleatY=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
 github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw=
 github.com/checkpoint-restore/go-criu v0.0.0-20190109184317-bdb7599cd87b/go.mod h1:TrMrLQfeENAPYPRsJuq3jsqdlRh3lvi6trTZJG8+tho=

@@ -217,7 +209,6 @@ github.com/denisenkom/go-mssqldb v0.0.0-20190515213511-eb9f6a1743f3/go.mod h1:zA
 github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
 github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
 github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
 github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c h1:TUuUh0Xgj97tLMNtWtNvI9mIV6isjEb9lBMNv+77IGM=
 github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
 github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc=
 github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=

@@ -259,7 +250,6 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
 github.com/euank/go-kmsg-parser v2.0.0+incompatible/go.mod h1:MhmAMZ8V4CYH4ybgdRwPr2TU5ThnS43puaKEMpja1uw=
 github.com/evanphx/json-patch v4.1.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/evanphx/json-patch v4.5.0+incompatible h1:ouOWdg56aJriqS0huScTkVXPC5IcNrDCXZ6OoTAWu7M=
 github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4=
 github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=

@@ -269,13 +259,11 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
 github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
 github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
 github.com/fatih/structtag v1.1.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
 github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
 github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
 github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
 github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
 github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk=
 github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=

@@ -287,7 +275,6 @@ github.com/ghodss/yaml v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2H
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 h1:Mn26/9ZMNWSw9C9ERFA1PUxfmGpolnw2v0bKOREu5ew=
 github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32/go.mod h1:GIjDIg/heH5DOkXY3YJ/wNhfHsQHoXGjl8G8amsYQ1I=
 github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
 github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
 github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
 github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=

@@ -298,7 +285,6 @@ github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
 github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
 github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM=
 github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
 github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=
 github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
 github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2JgI=
 github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs=

@@ -312,7 +298,6 @@ github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgO
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
 github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
 github.com/go-logr/logr v0.1.0 h1:M1Tv3VzNlEHg6uyACnRdtrploV2P7wZqH8BoQMtz0cg=
 github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
 github.com/go-logr/zapr v0.1.0/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk=
 github.com/go-logr/zapr v0.1.1/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk=

@@ -419,7 +404,6 @@ github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4er
 github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20191027212112-611e8accdfc9/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
 github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=

@@ -439,7 +423,6 @@ github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi
 github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
 github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
 github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
 github.com/golang/protobuf v1.4.0 h1:oOuy+ugB+P/kBdUnG5QaMXSIyJ1q38wWSojYCb3z5VQ=
 github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
 github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
 github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM=

@@ -458,9 +441,7 @@ github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M=
 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
 github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=

@@ -552,7 +533,6 @@ github.com/heketi/rest v0.0.0-20180404230133-aa6a65207413/go.mod h1:BeS3M108VzVl
 github.com/heketi/tests v0.0.0-20151005000721-f3775cbcefd6/go.mod h1:xGMAM8JLi7UkZt1i4FQeQy0R2T8GLUwQhOP5M1gBhy4=
 github.com/heketi/utils v0.0.0-20170317161834-435bc5bdfa64/go.mod h1:RYlF4ghFZPPmk2TC5REt5OFwvfb6lzxFWrTWB+qs28s=
 github.com/helm/helm-2to3 v0.2.0/go.mod h1:jQUVAWB0bM7zNIqKPIfHFzuFSK0kHYovJrjO+hqcvRk=
 github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4=
 github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=

@@ -585,14 +565,12 @@ github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhB
|
|||
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
|
||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
||||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
||||
github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=
|
||||
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
|
||||
github.com/jpillora/go-ogle-analytics v0.0.0-20161213085824-14b04e0594ef/go.mod h1:PlwhC7q1VSK73InDzdDatVetQrTsQHIbOvcJAZzitY0=
|
||||
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
|
@ -621,9 +599,7 @@ github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0
|
|||
github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
|
@ -632,7 +608,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
|||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kshvakov/clickhouse v1.3.5/go.mod h1:DMzX7FxRymoNkVgizH0DWAL8Cur7wHLgx3MUnGwJqpE=
|
||||
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
|
||||
|
@ -647,6 +622,8 @@ github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z
|
|||
github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa h1:lBEvg10ZPndmBUMtVaMRVCqeKnKYmjtRSg2SF4iTQ7o=
|
||||
github.com/litmuschaos/chaos-operator v0.0.0-20210126054859-85bb0ad85bfa/go.mod h1:Z2GpYjqXwFd8bx+kv58YEQFxynx1v9PMGCGTQFRVnFQ=
|
||||
github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ=
|
||||
github.com/litmuschaos/litmus v0.0.0-20210602074504-1b424623457f h1:CLezzbI5dn/WOXj/cDt9SbKA0cAko7M2tHbWzuvytpo=
|
||||
github.com/litmuschaos/litmus v0.0.0-20210603121521-c77e5d24fea8 h1:HzWuTLL5/LsNlKt7SicBvLxf5Fk5E82SsGjUDqAw4qc=
|
||||
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
|
||||
github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA=
|
||||
github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04=
|
||||
|
@ -689,7 +666,6 @@ github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
|
|||
github.com/mattn/go-shellwords v1.0.5/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
|
||||
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
|
||||
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.1/go.mod h1:F9YacGpnZbLQMzuPI0rR6op21YvNu/RjL705LJJpM3k=
|
||||
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY=
|
||||
|
@ -735,7 +711,6 @@ github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8m
|
|||
github.com/munnerz/goautoneg v0.0.0-20190414153302-2ae31c8b6b30/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/mvdan/xurls v1.1.0/go.mod h1:tQlNn3BED8bE/15hnSL2HLkDeLWpNPAwtw7wkEq44oU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
|
||||
github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA=
|
||||
|
@ -748,7 +723,6 @@ github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzE
|
|||
github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
|
||||
github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
|
||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||
|
@ -759,13 +733,11 @@ github.com/onsi/ginkgo v1.4.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W
|
|||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.10.1 h1:q/mM8GF/n0shIN8SaAZ0V+jnLPzen6WIVZdiwrRlMlo=
|
||||
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
|
||||
github.com/onsi/gomega v1.3.0/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/onsi/gomega v1.7.0 h1:XPnZz8VVBHjVsy1vzJmRwIcSwiUO+JFfrv/xGiigmME=
|
||||
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
|
||||
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
|
||||
|
@ -818,7 +790,6 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
|||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
|
||||
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
|
||||
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
|
||||
|
@ -840,7 +811,6 @@ github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:
|
|||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
|
||||
github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
|
||||
|
@ -863,7 +833,6 @@ github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDa
|
|||
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
|
||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
|
||||
github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=
|
||||
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
|
||||
github.com/prometheus/prometheus v2.3.2+incompatible/go.mod h1:oAIUtOny2rjMX0OWN5vPR5/q/twIROJvdqnQKDdil/s=
|
||||
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
||||
|
@ -949,7 +918,6 @@ github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
|
|||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/syndtr/gocapability v0.0.0-20160928074757-e7cb7fa329f4/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
github.com/thecodeteam/goscaleio v0.1.0/go.mod h1:68sdkZAsK8bvEwBlbQnlLS+xU+hvLYM/iQ8KXej1AwM=
|
||||
|
@ -1323,7 +1291,6 @@ google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
|
|||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc=
|
||||
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190128161407-8ac453e89fca/go.mod h1:L3J43x8/uS+qIUoksaLKe6OS3nUKxOKuIFz1sl2/jx4=
|
||||
|
@ -1376,11 +1343,9 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks
|
|||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
|
||||
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
|
||||
|
@ -1404,11 +1369,9 @@ gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76
|
|||
gopkg.in/square/go-jose.v2 v2.5.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
||||
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
|
||||
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
|
||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
|
||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
|
||||
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
|
||||
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
||||
|
|
File diff suppressed because it is too large
@@ -197,6 +197,11 @@ type DSResponse struct {
    UpdatedAt *string `json:"updated_at"`
}

type DateRange struct {
    StartDate string  `json:"start_date"`
    EndDate   *string `json:"end_date"`
}

type ExperimentInput struct {
    ProjectID string `json:"ProjectID"`
    ChartName string `json:"ChartName"`
@@ -211,6 +216,19 @@ type Experiments struct {
    Desc string `json:"Desc"`
}

type GetWorkflowRunsInput struct {
    ProjectID      string                  `json:"project_id"`
    WorkflowRunIds []*string               `json:"workflow_run_ids"`
    Pagination     *Pagination             `json:"pagination"`
    Sort           *SortInput              `json:"sort"`
    Filter         *WorkflowRunFilterInput `json:"filter"`
}

type GetWorkflowsOutput struct {
    TotalNoOfWorkflowRuns int            `json:"total_no_of_workflow_runs"`
    WorkflowRuns          []*WorkflowRun `json:"workflow_runs"`
}

type GitConfig struct {
    ProjectID string `json:"ProjectID"`
    Branch    string `json:"Branch"`
@@ -351,6 +369,11 @@ type PackageInformation struct {
    Experiments []*Experiments `json:"Experiments"`
}

type Pagination struct {
    Page  int `json:"page"`
    Limit int `json:"limit"`
}

type PodLog struct {
    ClusterID *ClusterIdentity `json:"cluster_id"`
    RequestID string           `json:"request_id"`
@@ -415,6 +438,11 @@ type ScheduledWorkflows struct {
    IsRemoved bool `json:"isRemoved"`
}

type SortInput struct {
    Field      WorkflowRunSortingField `json:"field"`
    Descending *bool                   `json:"descending"`
}

type Spec struct {
    DisplayName         string `json:"DisplayName"`
    CategoryDescription string `json:"CategoryDescription"`
@@ -499,15 +527,26 @@ type Workflow struct {
}

type WorkflowRun struct {
    WorkflowRunID string  `json:"workflow_run_id"`
    WorkflowID    string  `json:"workflow_id"`
    ClusterName   string  `json:"cluster_name"`
    LastUpdated   string  `json:"last_updated"`
    ProjectID     string  `json:"project_id"`
    ClusterID     string  `json:"cluster_id"`
    WorkflowName  string  `json:"workflow_name"`
    ClusterType   *string `json:"cluster_type"`
    ExecutionData string  `json:"execution_data"`
    WorkflowRunID     string   `json:"workflow_run_id"`
    WorkflowID        string   `json:"workflow_id"`
    ClusterName       string   `json:"cluster_name"`
    LastUpdated       string   `json:"last_updated"`
    ProjectID         string   `json:"project_id"`
    ClusterID         string   `json:"cluster_id"`
    WorkflowName      string   `json:"workflow_name"`
    ClusterType       *string  `json:"cluster_type"`
    Phase             string   `json:"phase"`
    ResiliencyScore   *float64 `json:"resiliency_score"`
    ExperimentsPassed *int     `json:"experiments_passed"`
    TotalExperiments  *int     `json:"total_experiments"`
    ExecutionData     string   `json:"execution_data"`
}

type WorkflowRunFilterInput struct {
    WorkflowName   *string            `json:"workflow_name"`
    ClusterName    *string            `json:"cluster_name"`
    WorkflowStatus *WorkflowRunStatus `json:"workflow_status"`
    DateRange      *DateRange         `json:"date_range"`
}

type WorkflowRunInput struct {
@@ -828,3 +867,89 @@ func (e *MemberRole) UnmarshalGQL(v interface{}) error {
func (e MemberRole) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}

type WorkflowRunSortingField string

const (
    WorkflowRunSortingFieldName WorkflowRunSortingField = "Name"
    WorkflowRunSortingFieldTime WorkflowRunSortingField = "Time"
)

var AllWorkflowRunSortingField = []WorkflowRunSortingField{
    WorkflowRunSortingFieldName,
    WorkflowRunSortingFieldTime,
}

func (e WorkflowRunSortingField) IsValid() bool {
    switch e {
    case WorkflowRunSortingFieldName, WorkflowRunSortingFieldTime:
        return true
    }
    return false
}

func (e WorkflowRunSortingField) String() string {
    return string(e)
}

func (e *WorkflowRunSortingField) UnmarshalGQL(v interface{}) error {
    str, ok := v.(string)
    if !ok {
        return fmt.Errorf("enums must be strings")
    }

    *e = WorkflowRunSortingField(str)
    if !e.IsValid() {
        return fmt.Errorf("%s is not a valid WorkflowRunSortingField", str)
    }
    return nil
}

func (e WorkflowRunSortingField) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}

type WorkflowRunStatus string

const (
    WorkflowRunStatusAll       WorkflowRunStatus = "All"
    WorkflowRunStatusFailed    WorkflowRunStatus = "Failed"
    WorkflowRunStatusRunning   WorkflowRunStatus = "Running"
    WorkflowRunStatusSucceeded WorkflowRunStatus = "Succeeded"
)

var AllWorkflowRunStatus = []WorkflowRunStatus{
    WorkflowRunStatusAll,
    WorkflowRunStatusFailed,
    WorkflowRunStatusRunning,
    WorkflowRunStatusSucceeded,
}

func (e WorkflowRunStatus) IsValid() bool {
    switch e {
    case WorkflowRunStatusAll, WorkflowRunStatusFailed, WorkflowRunStatusRunning, WorkflowRunStatusSucceeded:
        return true
    }
    return false
}

func (e WorkflowRunStatus) String() string {
    return string(e)
}

func (e *WorkflowRunStatus) UnmarshalGQL(v interface{}) error {
    str, ok := v.(string)
    if !ok {
        return fmt.Errorf("enums must be strings")
    }

    *e = WorkflowRunStatus(str)
    if !e.IsValid() {
        return fmt.Errorf("%s is not a valid WorkflowRunStatus", str)
    }
    return nil
}

func (e WorkflowRunStatus) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}
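As a quick illustration of the generated types above, here is a minimal, hypothetical sketch of assembling a GetWorkflowRunsInput in Go. Every concrete value (the project ID, page size, filter) is invented for the example; only ProjectID is mandatory, and leaving Pagination, Sort, or Filter nil makes the handler shown later in this commit return all runs in the default order (last updated, descending).

package main

import (
    "fmt"

    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func main() {
    descending := true
    status := model.WorkflowRunStatusSucceeded

    // Page 0 with limit 15 -> the 15 most recent Succeeded runs of the project.
    input := model.GetWorkflowRunsInput{
        ProjectID:  "example-project-id", // hypothetical ID
        Pagination: &model.Pagination{Page: 0, Limit: 15},
        Sort: &model.SortInput{
            Field:      model.WorkflowRunSortingFieldTime,
            Descending: &descending,
        },
        Filter: &model.WorkflowRunFilterInput{
            WorkflowStatus: &status,
        },
    }
    fmt.Printf("%+v\n", input)
}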
@@ -114,18 +114,6 @@ type ChaosWorkFlowResponse {
  isCustomWorkflow: Boolean!
}

type WorkflowRun {
  workflow_run_id: ID!
  workflow_id: ID!
  cluster_name: String!
  last_updated: String!
  project_id: ID!
  cluster_id: ID!
  workflow_name: String!
  cluster_type: String
  execution_data: String!
}

input WorkflowRunInput {
  workflow_id: ID!
  workflow_run_id: ID!
@@ -282,8 +270,9 @@ input KubeGVRRequest {
}

type Query {
  # [Deprecated soon]
  getWorkFlowRuns(project_id: String!): [WorkflowRun!]! @authorized
  getWorkflowRuns(
    workflowRunsInput: GetWorkflowRunsInput!
  ): GetWorkflowsOutput! @authorized

  getCluster(project_id: String!, cluster_type: String): [Cluster!]! @authorized

@@ -335,7 +324,10 @@ type Query {
  #Image Registry Queries
  ListImageRegistry(project_id: String!): [ImageRegistryResponse!] @authorized

  GetImageRegistry(image_registry_id: String!, project_id: String!): ImageRegistryResponse! @authorized
  GetImageRegistry(
    image_registry_id: String!
    project_id: String!
  ): ImageRegistryResponse! @authorized
}

type Mutation {
@@ -343,8 +335,7 @@ type Mutation {
  userClusterReg(clusterInput: ClusterInput!): clusterRegResponse! @authorized

  #It is used to create chaosworkflow
  createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse!
  @authorized
  createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized

  reRunChaosWorkFlow(workflowID: String!): String! @authorized

@@ -433,9 +424,16 @@ type Mutation {
  deleteManifestTemplate(template_id: String!): Boolean! @authorized

  #Image Registry Mutations
  createImageRegistry(project_id: String!, imageRegistryInfo: imageRegistryInput!): ImageRegistryResponse! @authorized
  createImageRegistry(
    project_id: String!
    imageRegistryInfo: imageRegistryInput!
  ): ImageRegistryResponse! @authorized

  updateImageRegistry(image_registry_id: String!, project_id: String!, imageRegistryInfo: imageRegistryInput!): ImageRegistryResponse! @authorized
  updateImageRegistry(
    image_registry_id: String!
    project_id: String!
    imageRegistryInfo: imageRegistryInput!
  ): ImageRegistryResponse! @authorized

  deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized
}
@@ -295,12 +295,12 @@ func (r *mutationResolver) DeleteImageRegistry(ctx context.Context, imageRegistr
    return diRegistry, err
}

func (r *queryResolver) GetWorkFlowRuns(ctx context.Context, projectID string) ([]*model.WorkflowRun, error) {
    err := validate.ValidateRole(ctx, projectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
func (r *queryResolver) GetWorkflowRuns(ctx context.Context, workflowRunsInput model.GetWorkflowRunsInput) (*model.GetWorkflowsOutput, error) {
    err := validate.ValidateRole(ctx, workflowRunsInput.ProjectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
    if err != nil {
        return nil, err
    }
    return wfHandler.QueryWorkflowRuns(projectID)
    return wfHandler.QueryWorkflowRuns(workflowRunsInput)
}

func (r *queryResolver) GetCluster(ctx context.Context, projectID string, clusterType *string) ([]*model.Cluster, error) {

@@ -346,11 +346,7 @@ func (r *queryResolver) ListWorkflow(ctx context.Context, projectID string, work
    if err != nil {
        return nil, err
    }
    if len(workflowIds) == 0 {
        return wfHandler.QueryListWorkflow(projectID)
    } else {
        return wfHandler.QueryListWorkflowByIDs(workflowIds)
    }
    return wfHandler.QueryListWorkflow(projectID, workflowIds)
}

func (r *queryResolver) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) {
@@ -0,0 +1,62 @@
enum WorkflowRunStatus {
  All
  Failed
  Running
  Succeeded
}

input DateRange {
  start_date: String!
  end_date: String
}

input WorkflowRunFilterInput {
  workflow_name: String
  cluster_name: String
  workflow_status: WorkflowRunStatus
  date_range: DateRange
}

input Pagination {
  page: Int!
  limit: Int!
}

enum WorkflowRunSortingField {
  Name
  Time
}

input SortInput {
  field: WorkflowRunSortingField!
  descending: Boolean
}

input GetWorkflowRunsInput {
  project_id: ID!
  workflow_run_ids: [ID]
  pagination: Pagination
  sort: SortInput
  filter: WorkflowRunFilterInput
}

type WorkflowRun {
  workflow_run_id: ID!
  workflow_id: ID!
  cluster_name: String!
  last_updated: String!
  project_id: ID!
  cluster_id: ID!
  workflow_name: String!
  cluster_type: String
  phase: String!
  resiliency_score: Float
  experiments_passed: Int
  total_experiments: Int
  execution_data: String!
}

type GetWorkflowsOutput {
  total_no_of_workflow_runs: Int!
  workflow_runs: [WorkflowRun]!
}
@@ -4,11 +4,14 @@ import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "log"
    "strconv"
    "strings"
    "time"

    "go.mongodb.org/mongo-driver/mongo"

    "github.com/tidwall/gjson"
    "github.com/tidwall/sjson"

@@ -17,6 +20,7 @@ import (

    "github.com/google/uuid"
    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
    types "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow"
    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow/ops"
    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster"
    store "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/data-store"
@@ -115,37 +119,251 @@ func UpdateWorkflow(ctx context.Context, input *model.ChaosWorkFlowInput, r *sto
    }, nil
}

// GetWorkflowRuns sends all the workflow runs for a project from the DB
func QueryWorkflowRuns(project_id string) ([]*model.WorkflowRun, error) {
    workflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
    if err != nil {
        return nil, err
    }
    result := []*model.WorkflowRun{}
// QueryWorkflowRuns sends all the workflow runs for a project from the DB
func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOutput, error) {
    var pipeline mongo.Pipeline

    for _, workflow := range workflows {
        cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
        if err != nil {
            return nil, err
    // Match with projectID
    matchStage := bson.D{
        {"$match", bson.D{
            {"project_id", input.ProjectID},
        }},
    }
    pipeline = append(pipeline, matchStage)

    includeAllFromWorkflow := bson.D{
        {"workflow_id", 1},
        {"workflow_name", 1},
        {"workflow_manifest", 1},
        {"cronSyntax", 1},
        {"workflow_description", 1},
        {"weightages", 1},
        {"isCustomWorkflow", 1},
        {"updated_at", 1},
        {"created_at", 1},
        {"project_id", 1},
        {"cluster_id", 1},
        {"cluster_name", 1},
        {"cluster_type", 1},
        {"isRemoved", 1},
    }

    // Match the workflowRunIds from the input array
    if len(input.WorkflowRunIds) != 0 {
        matchWfRunIdStage := bson.D{
            {"$project", append(includeAllFromWorkflow,
                bson.E{Key: "workflow_runs", Value: bson.D{
                    {"$filter", bson.D{
                        {"input", "$workflow_runs"},
                        {"as", "wfRun"},
                        {"cond", bson.D{
                            {"$in", bson.A{"$$wfRun.workflow_run_id", input.WorkflowRunIds}},
                        }},
                    }},
                }},
            )},
        }
        for _, wfrun := range workflow.WorkflowRuns {
            newWorkflowRun := model.WorkflowRun{
                WorkflowName:  workflow.WorkflowName,
                WorkflowID:    workflow.WorkflowID,
                WorkflowRunID: wfrun.WorkflowRunID,
                LastUpdated:   wfrun.LastUpdated,
                ProjectID:     workflow.ProjectID,
                ClusterID:     workflow.ClusterID,
                ExecutionData: wfrun.ExecutionData,
                ClusterName:   cluster.ClusterName,
                ClusterType:   &cluster.ClusterType,

        pipeline = append(pipeline, matchWfRunIdStage)
    }

    // Filtering based on multiple parameters
    if input.Filter != nil {

        // Filtering based on workflow name
        if input.Filter.WorkflowName != nil && *input.Filter.WorkflowName != "" {
            matchWfNameStage := bson.D{
                {"$match", bson.D{
                    {"workflow_name", bson.D{
                        {"$regex", input.Filter.WorkflowName},
                    }},
                }},
            }
            result = append(result, &newWorkflowRun)
            pipeline = append(pipeline, matchWfNameStage)
        }

        // Filtering based on cluster name
        if input.Filter.ClusterName != nil && *input.Filter.ClusterName != "All" && *input.Filter.ClusterName != "" {
            matchClusterStage := bson.D{
                {"$match", bson.D{
                    {"cluster_name", input.Filter.ClusterName},
                }},
            }
            pipeline = append(pipeline, matchClusterStage)
        }

        // Filtering based on phase
        if input.Filter.WorkflowStatus != nil && *input.Filter.WorkflowStatus != "All" && *input.Filter.WorkflowStatus != "" {
            filterWfRunPhaseStage := bson.D{
                {"$project", append(includeAllFromWorkflow,
                    bson.E{Key: "workflow_runs", Value: bson.D{
                        {"$filter", bson.D{
                            {"input", "$workflow_runs"},
                            {"as", "wfRun"},
                            {"cond", bson.D{
                                {"$eq", bson.A{"$$wfRun.phase", string(*input.Filter.WorkflowStatus)}},
                            }},
                        }},
                    }},
                )},
            }

            pipeline = append(pipeline, filterWfRunPhaseStage)
        }

        // Filtering based on date range
        if input.Filter.DateRange != nil {
            endDate := strconv.FormatInt(time.Now().Unix(), 10)
            if input.Filter.DateRange.EndDate != nil {
                endDate = *input.Filter.DateRange.EndDate
            }
            filterWfRunDateStage := bson.D{
                {"$project", append(includeAllFromWorkflow,
                    bson.E{Key: "workflow_runs", Value: bson.D{
                        {"$filter", bson.D{
                            {"input", "$workflow_runs"},
                            {"as", "wfRun"},
                            {"cond", bson.D{
                                {"$and", bson.A{
                                    bson.D{{"$lte", bson.A{"$$wfRun.last_updated", endDate}}},
                                    bson.D{{"$gte", bson.A{"$$wfRun.last_updated", input.Filter.DateRange.StartDate}}},
                                }},
                            }},
                        }},
                    }},
                )},
            }

            pipeline = append(pipeline, filterWfRunDateStage)
        }
    }
    return result, nil

    // Flatten out the workflow runs
    unwindStage := bson.D{
        {"$unwind", bson.D{
            {"path", "$workflow_runs"},
        }},
    }
    pipeline = append(pipeline, unwindStage)

    var sortStage bson.D

    switch {
    case input.Sort != nil && input.Sort.Field == model.WorkflowRunSortingFieldTime:
        // Sorting based on LastUpdated time
        if input.Sort.Descending != nil && *input.Sort.Descending {
            sortStage = bson.D{
                {"$sort", bson.D{
                    {"workflow_runs.last_updated", -1},
                }},
            }
        } else {
            sortStage = bson.D{
                {"$sort", bson.D{
                    {"workflow_runs.last_updated", 1},
                }},
            }
        }
    case input.Sort != nil && input.Sort.Field == model.WorkflowRunSortingFieldName:
        // Sorting based on WorkflowName
        if input.Sort.Descending != nil && *input.Sort.Descending {
            sortStage = bson.D{
                {"$sort", bson.D{
                    {"workflow_name", -1},
                }},
            }
        } else {
            sortStage = bson.D{
                {"$sort", bson.D{
                    {"workflow_name", 1},
                }},
            }
        }
    default:
        // Default sorting: by LastUpdated time in descending order
        sortStage = bson.D{
            {"$sort", bson.D{
                {"workflow_runs.last_updated", -1},
            }},
        }
    }

    // Pagination
    paginatedWorkflows := bson.A{
        sortStage,
    }

    if input.Pagination != nil {
        paginationSkipStage := bson.D{
            {"$skip", input.Pagination.Page * input.Pagination.Limit},
        }
        paginationLimitStage := bson.D{
            {"$limit", input.Pagination.Limit},
        }

        paginatedWorkflows = append(paginatedWorkflows, paginationSkipStage, paginationLimitStage)
    }
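    // Editor's note on the arithmetic above: pages are zero-based, so $skip
    // drops the first page*limit runs and $limit caps the page size; e.g.
    // page 2 with limit 15 skips 30 runs and returns runs 31-45 of the
    // sorted stream.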

    // Add two stages where we first count the number of filtered workflow runs and then paginate the results
    facetStage := bson.D{
        {"$facet", bson.D{
            {"total_filtered_workflow_runs", bson.A{
                bson.D{{"$count", "count"}},
            }},
            {"flattened_workflow_runs", paginatedWorkflows},
        }},
    }
    pipeline = append(pipeline, facetStage)

    // Call aggregation on pipeline
    workflowsCursor, err := dbOperationsWorkflow.GetAggregateWorkflows(pipeline)

    var result []*model.WorkflowRun

    var workflows []dbSchemaWorkflow.AggregatedWorkflowRuns

    if err = workflowsCursor.All(context.Background(), &workflows); err != nil {
        fmt.Println(err)
        return &model.GetWorkflowsOutput{
            TotalNoOfWorkflowRuns: 0,
            WorkflowRuns:          result,
        }, nil
    }

    for _, workflow := range workflows[0].FlattenedWorkflowRuns {
        workflowRun := workflow.WorkflowRuns

        newWorkflowRun := model.WorkflowRun{
            WorkflowName:      workflow.WorkflowName,
            WorkflowID:        workflow.WorkflowID,
            WorkflowRunID:     workflowRun.WorkflowRunID,
            LastUpdated:       workflowRun.LastUpdated,
            ProjectID:         workflow.ProjectID,
            ClusterID:         workflow.ClusterID,
            Phase:             workflowRun.Phase,
            ResiliencyScore:   workflowRun.ResiliencyScore,
            ExperimentsPassed: workflowRun.ExperimentsPassed,
            TotalExperiments:  workflowRun.TotalExperiments,
            ExecutionData:     workflowRun.ExecutionData,
            ClusterName:       workflow.ClusterName,
            ClusterType:       &workflow.ClusterType,
        }
        result = append(result, &newWorkflowRun)
    }

    totalFilteredWorkflowRuns := 0
    if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflowRuns) > 0 {
        totalFilteredWorkflowRuns = workflows[0].TotalFilteredWorkflowRuns[0].Count
    }

    output := model.GetWorkflowsOutput{
        TotalNoOfWorkflowRuns: totalFilteredWorkflowRuns,
        WorkflowRuns:          result,
    }
    return &output, nil
}
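The decode target dbSchemaWorkflow.AggregatedWorkflowRuns is not part of this diff. The following is an inferred sketch — an assumption, not the actual definition — of the shape the $facet stage forces on it, based on the facet keys and the field accesses above (.FlattenedWorkflowRuns, .WorkflowRuns, .TotalFilteredWorkflowRuns[0].Count):

package sketch

// Hypothetical mirror of dbSchemaWorkflow.AggregatedWorkflowRuns.
type AggregatedWorkflowRuns struct {
    // "$count" emits at most one {count: N} document.
    TotalFilteredWorkflowRuns []struct {
        Count int `bson:"count"`
    } `bson:"total_filtered_workflow_runs"`
    // One element per unwound workflow run.
    FlattenedWorkflowRuns []FlattenedWorkflowRun `bson:"flattened_workflow_runs"`
}

type FlattenedWorkflowRun struct {
    WorkflowID   string `bson:"workflow_id"`
    WorkflowName string `bson:"workflow_name"`
    ProjectID    string `bson:"project_id"`
    ClusterID    string `bson:"cluster_id"`
    ClusterName  string `bson:"cluster_name"`
    ClusterType  string `bson:"cluster_type"`
    // After $unwind this holds a single run, not an array.
    WorkflowRuns ChaosWorkflowRun `bson:"workflow_runs"`
}

// ChaosWorkflowRun matches the db schema shown later in this commit.
type ChaosWorkflowRun struct {
    WorkflowRunID     string   `bson:"workflow_run_id"`
    LastUpdated       string   `bson:"last_updated"`
    Phase             string   `bson:"phase"`
    ResiliencyScore   *float64 `bson:"resiliency_score,string,omitempty"`
    ExperimentsPassed *int     `bson:"experiments_passed,string,omitempty"`
    TotalExperiments  *int     `bson:"total_experiments,string,omitempty"`
    ExecutionData     string   `bson:"execution_data"`
    Completed         bool     `bson:"completed"`
}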

// Deprecated
func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
    if err != nil {
@@ -186,22 +404,32 @@ func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
    return result, nil
}

func QueryListWorkflow(project_id string) ([]*model.Workflow, error) {
    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
// QueryListWorkflow returns all the workflows present in the given project
func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workflow, error) {
    var query bson.D
    if len(workflowIds) != 0 {
        query = bson.D{
            {"project_id", project_id},
            {"workflow_id", bson.M{"$in": workflowIds}},
        }
    } else {
        query = bson.D{
            {"project_id", project_id},
        }
    }
    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(query)

    if err != nil {
        return nil, err
    }

    result := []*model.Workflow{}
    var result []*model.Workflow
    for _, workflow := range chaosWorkflows {

        cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
        if err != nil {
            return nil, err
        }
        var Weightages []*model.Weightages
        copier.Copy(&Weightages, &workflow.Weightages)

        var WorkflowRuns []*model.WorkflowRuns
        copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)

@@ -228,68 +456,37 @@ func QueryListWorkflow(project_id string) ([]*model.Workflow, error) {
    return result, nil
}

func QueryListWorkflowByIDs(workflow_ids []*string) ([]*model.Workflow, error) {

    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"workflow_id", bson.M{"$in": workflow_ids}}})
    if err != nil {
        return nil, err
    }
    result := []*model.Workflow{}

    for _, workflow := range chaosWorkflows {
        cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
        if err != nil {
            return nil, err
        }

        var Weightages []*model.Weightages
        copier.Copy(&Weightages, &workflow.Weightages)

        var WorkflowRuns []*model.WorkflowRuns
        copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)

        newChaosWorkflows := model.Workflow{
            WorkflowType:        string(workflow.WorkflowType),
            WorkflowID:          workflow.WorkflowID,
            WorkflowManifest:    workflow.WorkflowManifest,
            WorkflowName:        workflow.WorkflowName,
            CronSyntax:          workflow.CronSyntax,
            WorkflowDescription: workflow.WorkflowDescription,
            Weightages:          Weightages,
            IsCustomWorkflow:    workflow.IsCustomWorkflow,
            UpdatedAt:           workflow.UpdatedAt,
            CreatedAt:           workflow.CreatedAt,
            ProjectID:           workflow.ProjectID,
            ClusterName:         cluster.ClusterName,
            ClusterID:           cluster.ClusterID,
            ClusterType:         cluster.ClusterType,
            WorkflowRuns:        WorkflowRuns,
        }
        result = append(result, &newChaosWorkflows)
    }

    return result, nil
}

// WorkFlowRunHandler Updates or Inserts a new Workflow Run into the DB
func WorkFlowRunHandler(input model.WorkflowRunInput, r store.StateData) (string, error) {
    cluster, err := cluster.VerifyCluster(*input.ClusterID)
    if err != nil {
        log.Print("ERROR", err)
        log.Println("ERROR", err)
        return "", err
    }

    // Parse and store execution data
    var executionData types.ExecutionData
    err = json.Unmarshal([]byte(input.ExecutionData), &executionData)
    if err != nil {
        log.Println("Cannot parse Execution Data of workflow run with id: ", input.WorkflowRunID)
        return "", err
    }

    // Resiliency Score will be calculated only if workflow execution is completed
    if input.Completed {
        input.ExecutionData = ops.ResiliencyScoreCalculator(input.ExecutionData, input.WorkflowID)
        executionData = ops.ResiliencyScoreCalculator(executionData, input.WorkflowID)
    }

    // err = dbOperationsWorkflow.UpdateWorkflowRun(dbOperationsWorkflow.WorkflowRun(newWorkflowRun))
    count, err := dbOperationsWorkflow.UpdateWorkflowRun(input.WorkflowID, dbSchemaWorkflow.ChaosWorkflowRun{
        WorkflowRunID: input.WorkflowRunID,
        LastUpdated:   strconv.FormatInt(time.Now().Unix(), 10),
        ExecutionData: input.ExecutionData,
        Completed:     input.Completed,
    count := 0
    count, err = dbOperationsWorkflow.UpdateWorkflowRun(input.WorkflowID, dbSchemaWorkflow.ChaosWorkflowRun{
        WorkflowRunID:     input.WorkflowRunID,
        LastUpdated:       strconv.FormatInt(time.Now().Unix(), 10),
        Phase:             executionData.Phase,
        ResiliencyScore:   &executionData.ResiliencyScore,
        ExperimentsPassed: &executionData.ExperimentsPassed,
        TotalExperiments:  &executionData.TotalExperiments,
        ExecutionData:     input.ExecutionData,
        Completed:         input.Completed,
    })
    if err != nil {
        log.Print("ERROR", err)
@@ -301,14 +498,18 @@ func WorkFlowRunHandler(input model.WorkflowRunInput, r store.StateData) (string
    }

    ops.SendWorkflowEvent(model.WorkflowRun{
        ClusterID:     cluster.ClusterID,
        ClusterName:   cluster.ClusterName,
        ProjectID:     cluster.ProjectID,
        LastUpdated:   strconv.FormatInt(time.Now().Unix(), 10),
        WorkflowRunID: input.WorkflowRunID,
        WorkflowName:  input.WorkflowName,
        ExecutionData: input.ExecutionData,
        WorkflowID:    input.WorkflowID,
        ClusterID:         cluster.ClusterID,
        ClusterName:       cluster.ClusterName,
        ProjectID:         cluster.ProjectID,
        LastUpdated:       strconv.FormatInt(time.Now().Unix(), 10),
        WorkflowRunID:     input.WorkflowRunID,
        WorkflowName:      input.WorkflowName,
        Phase:             executionData.Phase,
        ResiliencyScore:   &executionData.ResiliencyScore,
        ExperimentsPassed: &executionData.ExperimentsPassed,
        TotalExperiments:  &executionData.TotalExperiments,
        ExecutionData:     input.ExecutionData,
        WorkflowID:        input.WorkflowID,
    }, &r)

    return "Workflow Run Accepted", nil
@@ -14,6 +14,7 @@ import (
    "github.com/jinzhu/copier"
    chaosTypes "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1"
    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
    types "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/chaos-workflow"
    clusterOps "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster"
    clusterHandler "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/cluster/handler"
    store "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/data-store"
@@ -26,48 +27,6 @@ import (
    "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)

type WorkflowEvent struct {
    WorkflowID        string          `json:"-"`
    EventType         string          `json:"event_type"`
    UID               string          `json:"-"`
    Namespace         string          `json:"namespace"`
    Name              string          `json:"name"`
    CreationTimestamp string          `json:"creationTimestamp"`
    Phase             string          `json:"phase"`
    Message           string          `json:"message"`
    StartedAt         string          `json:"startedAt"`
    FinishedAt        string          `json:"finishedAt"`
    Nodes             map[string]Node `json:"nodes"`
}

// each node/step data
type Node struct {
    Name       string     `json:"name"`
    Phase      string     `json:"phase"`
    Message    string     `json:"message"`
    StartedAt  string     `json:"startedAt"`
    FinishedAt string     `json:"finishedAt"`
    Children   []string   `json:"children"`
    Type       string     `json:"type"`
    ChaosExp   *ChaosData `json:"chaosData,omitempty"`
}

// chaos data
type ChaosData struct {
    EngineUID              string                  `json:"engineUID"`
    EngineName             string                  `json:"engineName"`
    Namespace              string                  `json:"namespace"`
    ExperimentName         string                  `json:"experimentName"`
    ExperimentStatus       string                  `json:"experimentStatus"`
    LastUpdatedAt          string                  `json:"lastUpdatedAt"`
    ExperimentVerdict      string                  `json:"experimentVerdict"`
    ExperimentPod          string                  `json:"experimentPod"`
    RunnerPod              string                  `json:"runnerPod"`
    ProbeSuccessPercentage string                  `json:"probeSuccessPercentage"`
    FailStep               string                  `json:"failStep"`
    ChaosResult            *chaosTypes.ChaosResult `json:"chaosResult"`
}

// ProcessWorkflow takes the workflow and processes it as required
func ProcessWorkflow(workflow *model.ChaosWorkFlowInput) (*model.ChaosWorkFlowInput, *dbSchemaWorkflow.ChaosWorkflowType, error) {
    // security check for cluster access
@@ -146,6 +105,12 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
        copier.Copy(&Weightages, &input.Weightages)
    }

    // Get cluster information
    cluster, err := dbOperationsCluster.GetCluster(input.ClusterID)
    if err != nil {
        return err
    }

    newChaosWorkflow := dbSchemaWorkflow.ChaosWorkFlowInput{
        WorkflowID:       *input.WorkflowID,
        WorkflowManifest: input.WorkflowManifest,
@@ -156,6 +121,8 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
        IsCustomWorkflow: input.IsCustomWorkflow,
        ProjectID:        input.ProjectID,
        ClusterID:        input.ClusterID,
        ClusterName:      cluster.ClusterName,
        ClusterType:      cluster.ClusterType,
        Weightages:       Weightages,
        CreatedAt:        strconv.FormatInt(time.Now().Unix(), 10),
        UpdatedAt:        strconv.FormatInt(time.Now().Unix(), 10),
@@ -163,7 +130,7 @@ func ProcessWorkflowCreation(input *model.ChaosWorkFlowInput, wfType *dbSchemaWo
        IsRemoved: false,
    }

    err := dbOperationsWorkflow.InsertChaosWorkflow(newChaosWorkflow)
    err = dbOperationsWorkflow.InsertChaosWorkflow(newChaosWorkflow)
    if err != nil {
        return err
    }
@@ -249,18 +216,20 @@ func SendWorkflowEvent(wfRun model.WorkflowRun, r *store.StateData) {
    r.Mutex.Unlock()
}

// ResiliencyScoreCalculator calculates the Rscore and returns the execdata string
func ResiliencyScoreCalculator(execData string, wfid string) string {
    var resiliency_score, weightSum, totalTestResult, totalExperiments, totalExperimentsPassed int = 0, 0, 0, 0, 0
    var jsonData WorkflowEvent
    json.Unmarshal([]byte(execData), &jsonData)
// ResiliencyScoreCalculator calculates the Resiliency Score and returns the updated ExecutionData
func ResiliencyScoreCalculator(execData types.ExecutionData, wfid string) types.ExecutionData {
    var resiliencyScore float64 = 0.0
    var weightSum, totalTestResult, totalExperiments, totalExperimentsPassed int = 0, 0, 0, 0

    chaosWorkflows, _ := dbOperationsWorkflow.GetWorkflows(bson.D{{"workflow_id", bson.M{"$in": []string{wfid}}}})

    totalExperiments = len(chaosWorkflows[0].Weightages)
    weightMap := map[string]int{}
    for _, weightEntry := range chaosWorkflows[0].Weightages {
        weightMap[weightEntry.ExperimentName] = weightEntry.Weightage
    }
    for _, value := range jsonData.Nodes {

    for _, value := range execData.Nodes {
        if value.Type == "ChaosEngine" {
            if value.ChaosExp == nil {
                continue
@@ -276,12 +245,14 @@ func ResiliencyScoreCalculator(execData string, wfid string) string {
            }
        }
    }
    if weightSum == 0 {
        resiliency_score = 0
    } else {
        resiliency_score = (totalTestResult / weightSum)
    if weightSum != 0 {
        resiliencyScore = float64(totalTestResult) / float64(weightSum)
    }
    execData = "{" + `"resiliency_score":` + `"` + strconv.Itoa(resiliency_score) + `",` + `"experiments_passed":` + `"` + strconv.Itoa(totalExperimentsPassed) + `",` + `"total_experiments":` + `"` + strconv.Itoa(totalExperiments) + `",` + execData[1:]

    execData.ResiliencyScore = resiliencyScore
    execData.ExperimentsPassed = totalExperimentsPassed
    execData.TotalExperiments = totalExperiments

    return execData
}
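Worth noting: the old implementation divided two ints, so any fractional resiliency score was truncated, while the new code promotes to float64 first. A toy, self-contained walk-through with invented numbers — the loop that actually accumulates totalTestResult and weightSum is elided by this hunk:

package main

import "fmt"

func main() {
    // Invented figures standing in for the elided accumulation loop, e.g.
    // experiments with weights summing to 15 contributing 1300 weighted points.
    totalTestResult, weightSum := 1300, 15

    var resiliencyScore float64
    if weightSum != 0 { // guard against a workflow with no weighted experiments
        resiliencyScore = float64(totalTestResult) / float64(weightSum)
    }
    fmt.Printf("resiliency score: %.2f\n", resiliencyScore) // 86.67 (int division gives 86)
}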
@@ -0,0 +1,49 @@
package chaos_workflow

import chaosTypes "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1"

type ExecutionData struct {
    WorkflowType      string          `json:"workflow_type"`
    WorkflowID        string          `json:"-"`
    EventType         string          `json:"event_type"`
    UID               string          `json:"-"`
    Namespace         string          `json:"namespace"`
    Name              string          `json:"name"`
    CreationTimestamp string          `json:"creationTimestamp"`
    Phase             string          `json:"phase"`
    Message           string          `json:"message"`
    StartedAt         string          `json:"startedAt"`
    FinishedAt        string          `json:"finishedAt"`
    Nodes             map[string]Node `json:"nodes"`
    ResiliencyScore   float64         `json:"resiliency_score,string,omitempty"`
    ExperimentsPassed int             `json:"experiments_passed,string,omitempty"`
    TotalExperiments  int             `json:"total_experiments,string,omitempty"`
}

// Node represents each node/step data
type Node struct {
    Name       string     `json:"name"`
    Phase      string     `json:"phase"`
    Message    string     `json:"message"`
    StartedAt  string     `json:"startedAt"`
    FinishedAt string     `json:"finishedAt"`
    Children   []string   `json:"children"`
    Type       string     `json:"type"`
    ChaosExp   *ChaosData `json:"chaosData,omitempty"`
}

// ChaosData is the data we get from chaos exporter
type ChaosData struct {
    EngineUID              string                  `json:"engineUID"`
    EngineName             string                  `json:"engineName"`
    Namespace              string                  `json:"namespace"`
    ExperimentName         string                  `json:"experimentName"`
    ExperimentStatus       string                  `json:"experimentStatus"`
    LastUpdatedAt          string                  `json:"lastUpdatedAt"`
    ExperimentVerdict      string                  `json:"experimentVerdict"`
    ExperimentPod          string                  `json:"experimentPod"`
    RunnerPod              string                  `json:"runnerPod"`
    ProbeSuccessPercentage string                  `json:"probeSuccessPercentage"`
    FailStep               string                  `json:"failStep"`
    ChaosResult            *chaosTypes.ChaosResult `json:"chaosResult"`
}
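The `,string` option on ExecutionData's numeric fields is doing real work here: it suggests the incoming payload carries these numbers as quoted JSON strings, which encoding/json converts back to Go numbers on unmarshal. A minimal, self-contained demonstration with an invented payload:

package main

import (
    "encoding/json"
    "fmt"
)

// Stand-in struct reusing the same tag options as ExecutionData above.
type scorePayload struct {
    ResiliencyScore  float64 `json:"resiliency_score,string,omitempty"`
    TotalExperiments int     `json:"total_experiments,string,omitempty"`
}

func main() {
    raw := []byte(`{"resiliency_score":"86.67","total_experiments":"2"}`)

    var p scorePayload
    if err := json.Unmarshal(raw, &p); err != nil {
        panic(err)
    }
    fmt.Println(p.ResiliencyScore, p.TotalExperiments) // 86.67 2
}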
@ -20,29 +20,10 @@ type MongoOperator interface {
|
|||
Replace(ctx context.Context, collectionType int, query bson.D, replacement interface{}) (*mongo.UpdateResult, error)
|
||||
Delete(ctx context.Context, collectionType int, query bson.D, opts ...*options.DeleteOptions) (*mongo.DeleteResult, error)
|
||||
CountDocuments(ctx context.Context, collectionType int, query bson.D, opts ...*options.CountOptions) (int64, error)
|
||||
Aggregate(ctx context.Context, collectionType int, pipeline interface{}, opts ...*options.AggregateOptions) (*mongo.Cursor, error)
|
||||
GetCollection(collectionType int) (*mongo.Collection, error)
|
||||
}
|
||||
|
||||
type CollectionInterface interface {
|
||||
InsertOne(ctx context.Context, document interface{},
|
||||
opts ...*options.InsertOneOptions) (*mongo.InsertOneResult, error)
|
||||
InsertMany(ctx context.Context, document interface{},
|
||||
opts ...*options.InsertOneOptions) (*mongo.InsertOneResult, error)
|
||||
FindOne(ctx context.Context, filter interface{},
|
||||
opts ...*options.FindOneOptions) *mongo.SingleResult
|
||||
Find(ctx context.Context, filter interface{},
|
||||
opts ...*options.FindOptions) (*mongo.Cursor, error)
|
||||
UpdateOne(ctx context.Context, filter interface{}, update interface{},
|
||||
opts ...*options.UpdateOptions) (*mongo.UpdateResult, error)
|
||||
UpdateMany(ctx context.Context, filter interface{}, update interface{},
|
||||
opts ...*options.UpdateOptions) (*mongo.UpdateResult, error)
|
||||
ReplaceOne(ctx context.Context, filter interface{},
|
||||
replacement interface{}, opts ...*options.ReplaceOptions) (*mongo.UpdateResult, error)
|
||||
DeleteOne(ctx context.Context, filter interface{},
|
||||
opts ...*options.DeleteOptions) (*mongo.DeleteResult, error)
|
||||
CountDocuments(ctx context.Context, filter interface{}, opts ...*options.CountOptions) (int64, error)
|
||||
}
|
||||
|
||||
type MongoOperations struct{}
|
||||
|
||||
var (
|
||||
|
@ -171,6 +152,18 @@ func (m *MongoOperations) CountDocuments(ctx context.Context, collectionType int
    return result, nil
}

// Aggregate runs an aggregation pipeline on the collection mapped to the given collection type
func (m *MongoOperations) Aggregate(ctx context.Context, collectionType int, pipeline interface{}, opts ...*options.AggregateOptions) (*mongo.Cursor, error) {
    collection, err := m.GetCollection(collectionType)
    if err != nil {
        return nil, err
    }
    result, err := collection.Aggregate(ctx, pipeline, opts...)
    if err != nil {
        return nil, err
    }
    return result, nil
}

// GetCollection fetches the correct collection based on the collection type
func (m *MongoOperations) GetCollection(collectionType int) (*mongo.Collection, error) {
    return GetCollectionClient.getCollection(collectionType)
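This new Aggregate operation is what the paginated workflow-run queries build on. A sketch of a caller-side invocation; mongodb.Operator and mongodb.WorkflowCollection are the names this commit uses elsewhere, while the stages themselves are illustrative:

// Illustrative pipeline: count workflow runs per phase for one project.
pipeline := mongo.Pipeline{
    bson.D{{"$match", bson.D{{"project_id", projectID}}}},
    bson.D{{"$unwind", "$workflow_runs"}},
    bson.D{{"$group", bson.D{
        {"_id", "$workflow_runs.phase"},
        {"count", bson.D{{"$sum", 1}}},
    }}},
}
cursor, err := mongodb.Operator.Aggregate(ctx, mongodb.WorkflowCollection, pipeline)
if err != nil {
    return nil, err
}
var perPhase []bson.M
if err := cursor.All(ctx, &perPhase); err != nil {
    return nil, err
}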
@ -5,6 +5,8 @@ import (
    "errors"
    "time"

    "go.mongodb.org/mongo-driver/mongo"

    "github.com/litmuschaos/litmus/litmus-portal/graphql-server/pkg/database/mongodb"
    "go.mongodb.org/mongo-driver/bson"
)
@ -52,6 +54,10 @@ func UpdateWorkflowRun(workflowID string, wfRun ChaosWorkflowRun) (int, error) {
    update := bson.D{
        {"$set", bson.D{
            {"workflow_runs.$.last_updated", wfRun.LastUpdated},
            {"workflow_runs.$.phase", wfRun.Phase},
            {"workflow_runs.$.resiliency_score", wfRun.ResiliencyScore},
            {"workflow_runs.$.experiments_passed", wfRun.ExperimentsPassed},
            {"workflow_runs.$.total_experiments", wfRun.TotalExperiments},
            {"workflow_runs.$.execution_data", wfRun.ExecutionData},
            {"workflow_runs.$.completed", wfRun.Completed},
        }}}
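The positional $ in these paths updates exactly one array element, so the filter passed alongside this update must match that element. A sketch of the companion query, assuming the field names follow the bson tags used in this commit:

// Hypothetical filter: selects the workflow document and, via the array
// field, the specific run that the positional $ operator will update.
query := bson.D{
    {"workflow_id", workflowID},
    {"workflow_runs.workflow_run_id", wfRun.WorkflowRunID},
}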
@ -84,6 +90,18 @@ func GetWorkflows(query bson.D) ([]ChaosWorkFlowInput, error) {
    return workflows, nil
}

// GetAggregateWorkflows takes a mongo pipeline to retrieve the workflow details from the database
func GetAggregateWorkflows(pipeline mongo.Pipeline) (*mongo.Cursor, error) {
    ctx, cancel := context.WithTimeout(backgroundContext, 10*time.Second)
    defer cancel()

    results, err := mongodb.Operator.Aggregate(ctx, mongodb.WorkflowCollection, pipeline)
    if err != nil {
        return nil, err
    }

    return results, nil
}

// GetWorkflowsByClusterID takes a clusterID parameter to retrieve the workflow details from the database
func GetWorkflowsByClusterID(clusterID string) ([]ChaosWorkFlowInput, error) {
    query := bson.D{{"cluster_id", clusterID}}
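GetAggregateWorkflows is the entry point the paginated, sorted run queries go through. A minimal caller-side sketch, sorting on the last_updated tag; page and limit are hypothetical parameters standing in for the GraphQL pagination input:

// Illustrative pagination pipeline over the runs of one project.
pipeline := mongo.Pipeline{
    bson.D{{"$match", bson.D{{"project_id", projectID}}}},
    bson.D{{"$unwind", "$workflow_runs"}},
    bson.D{{"$sort", bson.D{{"workflow_runs.last_updated", -1}}}},
    bson.D{{"$skip", int64(page * limit)}},
    bson.D{{"$limit", int64(limit)}},
}
cursor, err := GetAggregateWorkflows(pipeline)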
@ -21,20 +21,53 @@ type ChaosWorkFlowInput struct {
    CreatedAt    string              `bson:"created_at"`
    ProjectID    string              `bson:"project_id"`
    ClusterID    string              `bson:"cluster_id"`
    ClusterName  string              `bson:"cluster_name"`
    ClusterType  string              `bson:"cluster_type"`
    WorkflowRuns []*ChaosWorkflowRun `bson:"workflow_runs"`
    IsRemoved    bool                `bson:"isRemoved"`
}

// ChaosWorkflowRun contains the required fields to be stored in the database for a workflow run
type ChaosWorkflowRun struct {
    WorkflowRunID string `bson:"workflow_run_id"`
    LastUpdated   string `bson:"last_updated"`
    ExecutionData string `bson:"execution_data"`
    Completed     bool   `bson:"completed"`
}

// WeightagesInput contains the required fields to be stored in the database for a weightages input
type WeightagesInput struct {
    ExperimentName string `bson:"experiment_name"`
    Weightage      int    `bson:"weightage"`
}

// ChaosWorkflowRun contains the required fields to be stored in the database for a workflow run
type ChaosWorkflowRun struct {
    WorkflowRunID     string   `bson:"workflow_run_id"`
    LastUpdated       string   `bson:"last_updated"`
    Phase             string   `bson:"phase"`
    ResiliencyScore   *float64 `bson:"resiliency_score,string,omitempty"`
    ExperimentsPassed *int     `bson:"experiments_passed,string,omitempty"`
    TotalExperiments  *int     `bson:"total_experiments,string,omitempty"`
    ExecutionData     string   `bson:"execution_data"`
    Completed         bool     `bson:"completed"`
}

type AggregatedWorkflowRuns struct {
    TotalFilteredWorkflowRuns []TotalFilteredWorkflowRuns `bson:"total_filtered_workflow_runs"`
    FlattenedWorkflowRuns     []FlattenedWorkflowRun      `bson:"flattened_workflow_runs"`
}

type TotalFilteredWorkflowRuns struct {
    Count int `bson:"count"`
}

type FlattenedWorkflowRun struct {
    WorkflowID          string             `bson:"workflow_id"`
    WorkflowManifest    string             `bson:"workflow_manifest"`
    CronSyntax          string             `bson:"cronSyntax"`
    WorkflowName        string             `bson:"workflow_name"`
    WorkflowDescription string             `bson:"workflow_description"`
    Weightages          []*WeightagesInput `bson:"weightages"`
    IsCustomWorkflow    bool               `bson:"isCustomWorkflow"`
    UpdatedAt           string             `bson:"updated_at"`
    CreatedAt           string             `bson:"created_at"`
    ProjectID           string             `bson:"project_id"`
    ClusterID           string             `bson:"cluster_id"`
    ClusterName         string             `bson:"cluster_name"`
    ClusterType         string             `bson:"cluster_type"`
    WorkflowRuns        ChaosWorkflowRun   `bson:"workflow_runs"`
    IsRemoved           bool               `bson:"isRemoved"`
}
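AggregatedWorkflowRuns is shaped for a $facet stage that returns the filtered count and one page of runs in a single round trip. A hedged sketch of how such a stage could decode into it; the two facet keys match the bson tags above, while the inner stages and the offset and limit variables are illustrative:

// Illustrative $facet stage: one branch counts every run that survived
// the preceding match/filter stages, the other returns a sorted page.
unwindStage := bson.D{{"$unwind", "$workflow_runs"}}
facetStage := bson.D{{"$facet", bson.D{
    {"total_filtered_workflow_runs", bson.A{
        bson.D{{"$count", "count"}},
    }},
    {"flattened_workflow_runs", bson.A{
        bson.D{{"$sort", bson.D{{"workflow_runs.last_updated", -1}}}},
        bson.D{{"$skip", int64(offset)}},
        bson.D{{"$limit", int64(limit)}},
    }},
}}}

cursor, err := GetAggregateWorkflows(mongo.Pipeline{unwindStage, facetStage})
if err != nil {
    return nil, err
}
var result []AggregatedWorkflowRuns
if err := cursor.All(context.TODO(), &result); err != nil {
    return nil, err
}
// result[0].TotalFilteredWorkflowRuns[0].Count is the total number of
// matching runs; result[0].FlattenedWorkflowRuns holds the page itself.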