List workflow optimization (#2882)

* Optimized ListWorkflow query in backend and frontend along with minor style changes

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* generated go.sum

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Added filter to filter out removed workflows

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Added condition to check empty workflows

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>
Sarthak Jain 2021-06-11 13:42:12 +05:30 committed by GitHub
parent 0a7e623356
commit ac00e474f3
27 changed files with 1037 additions and 770 deletions


@ -3845,6 +3845,11 @@
"d3-time": "^2.1.1" "d3-time": "^2.1.1"
}, },
"dependencies": { "dependencies": {
"@types/d3-time": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-2.1.0.tgz",
"integrity": "sha512-qVCiT93utxN0cawScyQuNx8H82vBvZXSClZfgOu3l3dRRlRO6FjKEZlaPgXG9XUFjIAOsA4kAJY101vobHeJLQ=="
},
"d3-array": { "d3-array": {
"version": "2.12.1", "version": "2.12.1",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz",


@ -39,7 +39,7 @@
"jsonwebtoken": "^8.5.1", "jsonwebtoken": "^8.5.1",
"jspdf": "^2.1.1", "jspdf": "^2.1.1",
"jspdf-autotable": "^3.5.13", "jspdf-autotable": "^3.5.13",
"litmus-ui": "^1.1.6", "litmus-ui": "^1.1.7",
"localforage": "^1.7.3", "localforage": "^1.7.3",
"lodash": "^4.17.20", "lodash": "^4.17.20",
"moment": "^2.27.0", "moment": "^2.27.0",


@ -40,54 +40,33 @@ export const WORKFLOW_DETAILS = gql`
} }
`; `;
export const SCHEDULE_DETAILS = gql`
query scheduleDetails($projectID: String!) {
getScheduledWorkflows(project_id: $projectID) {
workflow_id
workflow_manifest
cronSyntax
workflow_name
workflow_description
weightages {
experiment_name
weightage
}
isCustomWorkflow
updated_at
created_at
project_id
cluster_id
cluster_type
cluster_name
isRemoved
}
}
`;
export const WORKFLOW_LIST_DETAILS = gql` export const WORKFLOW_LIST_DETAILS = gql`
query workflowListDetails($projectID: String!, $workflowIDs: [ID]) { query workflowListDetails($workflowInput: ListWorkflowsInput!) {
ListWorkflow(project_id: $projectID, workflow_ids: $workflowIDs) { ListWorkflow(workflowInput: $workflowInput) {
workflow_id total_no_of_workflows
workflow_manifest workflows {
cronSyntax workflow_id
cluster_name workflow_manifest
workflow_name cronSyntax
workflow_description cluster_name
weightages { workflow_name
experiment_name workflow_description
weightage weightages {
} experiment_name
isCustomWorkflow weightage
updated_at }
created_at isCustomWorkflow
project_id updated_at
cluster_id created_at
cluster_type project_id
isRemoved cluster_id
workflow_runs { cluster_type
execution_data isRemoved
workflow_run_id workflow_runs {
last_updated execution_data
workflow_run_id
last_updated
}
} }
} }
} }


@ -20,15 +20,6 @@ export interface ScheduleWorkflow {
regularity?: string; regularity?: string;
isRemoved: boolean; isRemoved: boolean;
} }
export interface Schedules {
getScheduledWorkflows: ScheduleWorkflow[];
}
export interface ScheduleDataVars {
projectID: string;
}
export interface DeleteSchedule { export interface DeleteSchedule {
workflow_id: string; workflow_id: string;
} }


@ -51,7 +51,7 @@ export interface WorkflowRun {
workflow_run_id: string; workflow_run_id: string;
} }
export interface Workflow { export interface ScheduledWorkflow {
workflow_id: string; workflow_id: string;
workflow_manifest: string; workflow_manifest: string;
cronSyntax: string; cronSyntax: string;
@ -66,11 +66,11 @@ export interface Workflow {
cluster_id: string; cluster_id: string;
cluster_type: string; cluster_type: string;
isRemoved: Boolean; isRemoved: Boolean;
workflow_runs: WorkflowRun[]; workflow_runs?: WorkflowRun[];
} }
export interface WorkflowList { export interface WorkflowList {
ListWorkflow: Workflow[]; ListWorkflow: ScheduledWorkflow[];
} }
export interface WorkflowListDataVars { export interface WorkflowListDataVars {
@ -87,6 +87,39 @@ export interface ListManifestTemplateArray {
isCustomWorkflow: boolean; isCustomWorkflow: boolean;
} }
export interface Pagination {
page: number;
limit: number;
}
export interface ListManifestTemplate { export interface ListManifestTemplate {
ListManifestTemplate: ListManifestTemplateArray[]; ListManifestTemplate: ListManifestTemplateArray[];
} }
export interface SortInput {
field: 'Name';
descending?: Boolean;
}
export interface WorkflowFilterInput {
workflow_name?: string;
cluster_name?: string;
}
export interface ListWorkflowsInput {
workflowInput: {
project_id: string;
workflow_ids?: string[];
pagination?: Pagination;
sort?: SortInput;
filter?: WorkflowFilterInput;
};
}
export interface ListWorkflowsOutput {
totalNoOfWorkflows: number;
workflows: ScheduledWorkflow[];
}
export interface ScheduledWorkflows {
ListWorkflow: ListWorkflowsOutput;
}
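
Taken together, the new `Pagination`, `SortInput`, and `WorkflowFilterInput` interfaces compose into the single `workflowInput` argument that the optimized `ListWorkflow` query accepts. Below is a minimal sketch of a typed variables object built from them; the project ID, filter values, and relative import path are illustrative and assume the module layout used elsewhere in this diff.

```ts
import {
  ListWorkflowsInput,
  Pagination,
  SortInput,
  WorkflowFilterInput,
} from '../../models/graphql/workflowListData';

// Hypothetical values; only the shape matters here.
const pagination: Pagination = { page: 0, limit: 10 };
const sort: SortInput = { field: 'Name', descending: true };
const filter: WorkflowFilterInput = { workflow_name: 'node-cpu-hog' };

// Everything except project_id is optional, so a caller can ask the backend
// for exactly the slice of schedules it needs instead of the full list.
const variables: ListWorkflowsInput = {
  workflowInput: {
    project_id: 'example-project-id',
    pagination,
    sort,
    filter,
  },
};
```

BrowseSchedule later in this diff builds this same shape from its pagination, sort, and filter state.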


@ -16,13 +16,16 @@ import YamlEditor from '../../components/YamlEditor/Editor';
import { parseYamlValidations } from '../../components/YamlEditor/Validations'; import { parseYamlValidations } from '../../components/YamlEditor/Validations';
import Scaffold from '../../containers/layouts/Scaffold'; import Scaffold from '../../containers/layouts/Scaffold';
import { UPDATE_SCHEDULE } from '../../graphql/mutations'; import { UPDATE_SCHEDULE } from '../../graphql/mutations';
import { SCHEDULE_DETAILS } from '../../graphql/queries'; import { WORKFLOW_LIST_DETAILS } from '../../graphql/queries';
import { import {
CreateWorkFlowInput, CreateWorkFlowInput,
UpdateWorkflowResponse, UpdateWorkflowResponse,
WeightMap, WeightMap,
} from '../../models/graphql/createWorkflowData'; } from '../../models/graphql/createWorkflowData';
import { ScheduleDataVars, Schedules } from '../../models/graphql/scheduleData'; import {
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import { experimentMap, WorkflowData } from '../../models/redux/workflow'; import { experimentMap, WorkflowData } from '../../models/redux/workflow';
import useActions from '../../redux/actions'; import useActions from '../../redux/actions';
import * as TabActions from '../../redux/actions/tabs'; import * as TabActions from '../../redux/actions/tabs';
@ -78,11 +81,17 @@ const EditSchedule: React.FC = () => {
const projectID = getProjectID(); const projectID = getProjectID();
const userRole = getProjectRole(); const userRole = getProjectRole();
// Apollo query to get the scheduled data const { data, loading } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
const { data, loading } = useQuery<Schedules, ScheduleDataVars>( WORKFLOW_LIST_DETAILS,
SCHEDULE_DETAILS,
{ {
variables: { projectID: paramData.scheduleProjectID }, variables: {
workflowInput: {
project_id: projectID,
filter: {
workflow_name: paramData.workflowName,
},
},
},
fetchPolicy: 'cache-and-network', fetchPolicy: 'cache-and-network',
} }
); );
@ -91,11 +100,7 @@ const EditSchedule: React.FC = () => {
(state: RootState) => state.workflowManifest.manifest (state: RootState) => state.workflowManifest.manifest
); );
const wfDetails = const wfDetails = data && data.ListWorkflow.workflows[0];
data &&
data.getScheduledWorkflows.filter(
(wf) => wf.workflow_name === paramData.workflowName
)[0];
const doc = new YAML.Document(); const doc = new YAML.Document();
const w: Weights[] = []; const w: Weights[] = [];
const { cronSyntax, clusterid, clustername } = workflowData; const { cronSyntax, clusterid, clustername } = workflowData;


@ -12,9 +12,9 @@ import Scaffold from '../../containers/layouts/Scaffold';
import { WORKFLOW_LIST_DETAILS } from '../../graphql'; import { WORKFLOW_LIST_DETAILS } from '../../graphql';
import { ChaosData, ExecutionData } from '../../models/graphql/workflowData'; import { ChaosData, ExecutionData } from '../../models/graphql/workflowData';
import { import {
ListWorkflowsInput,
ScheduledWorkflows,
WeightageMap, WeightageMap,
WorkflowList,
WorkflowListDataVars,
} from '../../models/graphql/workflowListData'; } from '../../models/graphql/workflowListData';
import { getProjectID } from '../../utils/getSearchParams'; import { getProjectID } from '../../utils/getSearchParams';
import PopOver from '../../views/Analytics/WorkflowDashboard/PopOver'; import PopOver from '../../views/Analytics/WorkflowDashboard/PopOver';
@ -81,11 +81,13 @@ const AnalyticsPage: React.FC = () => {
const projectID = getProjectID(); const projectID = getProjectID();
// Apollo query to get the scheduled workflow data // Apollo query to get the scheduled workflow data
const { data, error } = useQuery<WorkflowList, WorkflowListDataVars>( const { data, error } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS, WORKFLOW_LIST_DETAILS,
{ {
variables: { projectID, workflowIDs: [] }, variables: {
pollInterval: 100, workflowInput: { project_id: projectID, workflow_ids: [workflowId] },
},
pollInterval: 5000,
} }
); );
@ -103,9 +105,7 @@ const AnalyticsPage: React.FC = () => {
const chaosDataArray: ChaosData[] = []; const chaosDataArray: ChaosData[] = [];
const validWorkflowRunsData: WorkflowRunData[] = []; const validWorkflowRunsData: WorkflowRunData[] = [];
try { try {
const selectedWorkflowSchedule = data?.ListWorkflow.filter( const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
(w) => w.workflow_id === workflowId
);
const selectedWorkflows = selectedWorkflowSchedule const selectedWorkflows = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs ? selectedWorkflowSchedule[0]?.workflow_runs
: []; : [];
@ -233,13 +233,11 @@ const AnalyticsPage: React.FC = () => {
useEffect(() => { useEffect(() => {
const workflowTestsArray: WorkFlowTests[] = []; const workflowTestsArray: WorkFlowTests[] = [];
try { try {
const selectedWorkflowSchedule = data?.ListWorkflow.filter( const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
(w) => w.workflow_id === workflowId
);
const workflowRuns = selectedWorkflowSchedule const workflowRuns = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs ? selectedWorkflowSchedule[0]?.workflow_runs
: []; : [];
const selectedWorkflows = workflowRuns.filter( const selectedWorkflows = workflowRuns?.filter(
(w) => w.workflow_run_id === selectedWorkflowRunID (w) => w.workflow_run_id === selectedWorkflowRunID
); );
selectedWorkflows?.forEach((data) => { selectedWorkflows?.forEach((data) => {
@ -286,9 +284,7 @@ const AnalyticsPage: React.FC = () => {
}, [selectedWorkflowRunID, data]); }, [selectedWorkflowRunID, data]);
// Number of Workflow Runs for the selected Schedule // Number of Workflow Runs for the selected Schedule
const selectedWorkflowSchedule = data?.ListWorkflow.filter( const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
(w) => w.workflow_id === workflowId
);
const workflowRuns = selectedWorkflowSchedule const workflowRuns = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs ? selectedWorkflowSchedule[0]?.workflow_runs
: []; : [];
@ -313,7 +309,7 @@ const AnalyticsPage: React.FC = () => {
</div> </div>
<div className={classes.analyticsDiv}> <div className={classes.analyticsDiv}>
<WorkflowRunsBarChart <WorkflowRunsBarChart
numberOfWorkflowRuns={workflowRuns.length} numberOfWorkflowRuns={workflowRuns?.length ?? 0}
workflowRunData={workflowRunDataForPlot} workflowRunData={workflowRunDataForPlot}
callBackToShowPopOver={setPopOverDisplay} callBackToShowPopOver={setPopOverDisplay}
callBackToSelectWorkflowRun={( callBackToSelectWorkflowRun={(
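
Because `workflow_runs` is now optional on `ScheduledWorkflow`, the analytics page reads the single schedule returned for `workflow_ids: [workflowId]` and guards every access with optional chaining. A small sketch of the same pattern, assuming a response typed as `ScheduledWorkflows`; the helper name is hypothetical.

```ts
import { ScheduledWorkflows } from '../../models/graphql/workflowListData';

// Hypothetical helper: number of runs for the first schedule in the response.
const countRuns = (data?: ScheduledWorkflows): number => {
  const schedule = data?.ListWorkflow.workflows[0];
  // workflow_runs may be undefined, so fall back to 0 instead of throwing.
  return schedule?.workflow_runs?.length ?? 0;
};
```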


@ -10,15 +10,11 @@ import Loader from '../../components/Loader';
import { StyledTab, TabPanel } from '../../components/Tabs'; import { StyledTab, TabPanel } from '../../components/Tabs';
import Scaffold from '../../containers/layouts/Scaffold'; import Scaffold from '../../containers/layouts/Scaffold';
import { import {
SCHEDULE_DETAILS,
WORKFLOW_DETAILS_WITH_EXEC_DATA, WORKFLOW_DETAILS_WITH_EXEC_DATA,
WORKFLOW_EVENTS_WITH_EXEC_DATA, WORKFLOW_EVENTS_WITH_EXEC_DATA,
WORKFLOW_LIST_DETAILS,
} from '../../graphql'; } from '../../graphql';
import { import { ScheduleWorkflow } from '../../models/graphql/scheduleData';
ScheduleDataVars,
Schedules,
ScheduleWorkflow,
} from '../../models/graphql/scheduleData';
import { import {
ExecutionData, ExecutionData,
Workflow, Workflow,
@ -26,6 +22,10 @@ import {
WorkflowSubscription, WorkflowSubscription,
WorkflowSubscriptionInput, WorkflowSubscriptionInput,
} from '../../models/graphql/workflowData'; } from '../../models/graphql/workflowData';
import {
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import useActions from '../../redux/actions'; import useActions from '../../redux/actions';
import * as NodeSelectionActions from '../../redux/actions/nodeSelection'; import * as NodeSelectionActions from '../../redux/actions/nodeSelection';
import * as TabActions from '../../redux/actions/tabs'; import * as TabActions from '../../redux/actions/tabs';
@ -81,20 +81,24 @@ const WorkflowDetails: React.FC = () => {
} }
); );
const workflow = data?.getWorkflowRuns.workflow_runs[0]; const workflowRun = data?.getWorkflowRuns.workflow_runs[0];
// Apollo query to get the scheduled data const { data: workflowData, loading } = useQuery<
const { data: SchedulesData, loading } = useQuery< ScheduledWorkflows,
Schedules, ListWorkflowsInput
ScheduleDataVars >(WORKFLOW_LIST_DETAILS, {
>(SCHEDULE_DETAILS, { variables: {
variables: { projectID }, workflowInput: {
project_id: projectID,
workflow_ids: [workflowRun?.workflow_id ?? ' '],
},
},
fetchPolicy: 'cache-and-network', fetchPolicy: 'cache-and-network',
}); });
// Using subscription to get realtime data // Using subscription to get realtime data
useEffect(() => { useEffect(() => {
if (workflow?.phase && workflow.phase === 'Running') { if (workflowRun?.phase && workflowRun.phase === 'Running') {
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({ subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
document: WORKFLOW_EVENTS_WITH_EXEC_DATA, document: WORKFLOW_EVENTS_WITH_EXEC_DATA,
variables: { projectID }, variables: { projectID },
@ -130,17 +134,13 @@ const WorkflowDetails: React.FC = () => {
}; };
useEffect(() => { useEffect(() => {
const scheduledWorkflow = SchedulesData?.getScheduledWorkflows.filter( const scheduledWorkflow = workflowData?.ListWorkflow.workflows;
(schedulesWorkflow) => {
return schedulesWorkflow.workflow_id === workflow?.workflow_id;
}
);
if (scheduledWorkflow) { if (scheduledWorkflow) {
setworkflowSchedulesDetails( setworkflowSchedulesDetails(
(scheduledWorkflow[0] ? scheduledWorkflow[0] : null) as ScheduleWorkflow (scheduledWorkflow[0] ? scheduledWorkflow[0] : null) as ScheduleWorkflow
); );
} }
}, [SchedulesData]); }, [workflowData]);
// On fresh screen refresh 'Workflow' Tab would be selected // On fresh screen refresh 'Workflow' Tab would be selected
useEffect(() => { useEffect(() => {
@ -149,15 +149,19 @@ const WorkflowDetails: React.FC = () => {
// Setting NodeId of first Node in redux for selection of first node in Argo graph by default // Setting NodeId of first Node in redux for selection of first node in Argo graph by default
useEffect(() => { useEffect(() => {
if (workflow && pod_name === '') { if (workflowRun && pod_name === '') {
if ( if (
Object.keys(JSON.parse(workflow.execution_data as string).nodes).length Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)
.length
) { ) {
const firstNodeId = JSON.parse(workflow.execution_data as string).nodes[ const firstNodeId = JSON.parse(workflowRun.execution_data as string)
Object.keys(JSON.parse(workflow.execution_data as string).nodes)[0] .nodes[
Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)[0]
].name; ].name;
nodeSelection.selectNode({ nodeSelection.selectNode({
...JSON.parse(workflow.execution_data as string).nodes[firstNodeId], ...JSON.parse(workflowRun.execution_data as string).nodes[
firstNodeId
],
pod_name: firstNodeId, pod_name: firstNodeId,
}); });
} else { } else {
@ -172,11 +176,11 @@ const WorkflowDetails: React.FC = () => {
<div className={classes.button}> <div className={classes.button}>
<BackButton /> <BackButton />
</div> </div>
{/* If workflow data is present then display the workflow details */} {/* If workflowRun data is present then display the workflowRun details */}
{workflow && pod_name !== '' && !loading ? ( {workflowRun && pod_name !== '' && !loading ? (
<div> <div>
<Typography data-cy="wfName" className={classes.title}> <Typography data-cy="wfName" className={classes.title}>
{t('workflowDetailsView.headerDesc')} {workflow.workflow_name} {t('workflowDetailsView.headerDesc')} {workflowRun.workflow_name}
</Typography> </Typography>
{/* AppBar */} {/* AppBar */}
@ -204,7 +208,8 @@ const WorkflowDetails: React.FC = () => {
{/* Argo Workflow DAG Graph */} {/* Argo Workflow DAG Graph */}
<ArgoWorkflow <ArgoWorkflow
nodes={ nodes={
(JSON.parse(workflow.execution_data) as ExecutionData).nodes (JSON.parse(workflowRun.execution_data) as ExecutionData)
.nodes
} }
setIsInfoToggled={setIsInfoToggled} setIsInfoToggled={setIsInfoToggled}
/> />
@ -212,9 +217,9 @@ const WorkflowDetails: React.FC = () => {
{isInfoToggled ? ( {isInfoToggled ? (
<div> <div>
{pod_name !== {pod_name !==
JSON.parse(workflow.execution_data).nodes[ JSON.parse(workflowRun.execution_data).nodes[
Object.keys( Object.keys(
JSON.parse(workflow.execution_data as string).nodes JSON.parse(workflowRun.execution_data as string).nodes
)[0] )[0]
].name ? ( ].name ? (
/* Node details and Logs */ /* Node details and Logs */
@ -223,10 +228,12 @@ const WorkflowDetails: React.FC = () => {
workflowSchedulesDetails?.workflow_manifest as string workflowSchedulesDetails?.workflow_manifest as string
} }
setIsInfoToggled={setIsInfoToggled} setIsInfoToggled={setIsInfoToggled}
cluster_id={workflow.cluster_id} cluster_id={workflowRun.cluster_id}
workflow_run_id={workflow.workflow_run_id} workflow_run_id={workflowRun.workflow_run_id}
data={ data={
JSON.parse(workflow.execution_data) as ExecutionData JSON.parse(
workflowRun.execution_data
) as ExecutionData
} }
/> />
) : ( ) : (
@ -234,11 +241,13 @@ const WorkflowDetails: React.FC = () => {
<WorkflowInfo <WorkflowInfo
tab={1} tab={1}
setIsInfoToggled={setIsInfoToggled} setIsInfoToggled={setIsInfoToggled}
cluster_name={workflow.cluster_name} cluster_name={workflowRun.cluster_name}
data={ data={
JSON.parse(workflow.execution_data) as ExecutionData JSON.parse(
workflowRun.execution_data
) as ExecutionData
} }
resiliency_score={workflow.resiliency_score} resiliency_score={workflowRun.resiliency_score}
/> />
)} )}
</div> </div>
@ -249,24 +258,24 @@ const WorkflowDetails: React.FC = () => {
{/* Workflow Info */} {/* Workflow Info */}
<WorkflowInfo <WorkflowInfo
tab={2} tab={2}
cluster_name={workflow.cluster_name} cluster_name={workflowRun.cluster_name}
data={JSON.parse(workflow.execution_data) as ExecutionData} data={JSON.parse(workflowRun.execution_data) as ExecutionData}
resiliency_score={workflow.resiliency_score} resiliency_score={workflowRun.resiliency_score}
/> />
{/* Table for all Node details */} {/* Table for all Node details */}
<NodeTable <NodeTable
manifest={workflowSchedulesDetails?.workflow_manifest as string} manifest={workflowSchedulesDetails?.workflow_manifest as string}
data={JSON.parse(workflow.execution_data) as ExecutionData} data={JSON.parse(workflowRun.execution_data) as ExecutionData}
handleClose={() => setLogsModalOpen(true)} handleClose={() => setLogsModalOpen(true)}
/> />
{/* Modal for viewing logs of a node */} {/* Modal for viewing logs of a node */}
<NodeLogsModal <NodeLogsModal
logsOpen={logsModalOpen} logsOpen={logsModalOpen}
handleClose={() => setLogsModalOpen(false)} handleClose={() => setLogsModalOpen(false)}
cluster_id={workflow.cluster_id} cluster_id={workflowRun.cluster_id}
workflow_run_id={workflow.workflow_run_id} workflow_run_id={workflowRun.workflow_run_id}
data={JSON.parse(workflow.execution_data) as ExecutionData} data={JSON.parse(workflowRun.execution_data) as ExecutionData}
workflow_name={workflow.workflow_name} workflow_name={workflowRun.workflow_name}
/> />
</TabPanel> </TabPanel>
</div> </div>


@ -9,7 +9,7 @@ import {
} from '@material-ui/core'; } from '@material-ui/core';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { Workflow } from '../../../../models/graphql/workflowListData'; import { ScheduledWorkflow } from '../../../../models/graphql/workflowListData';
import useActions from '../../../../redux/actions'; import useActions from '../../../../redux/actions';
import * as TabActions from '../../../../redux/actions/tabs'; import * as TabActions from '../../../../redux/actions/tabs';
import { history } from '../../../../redux/configureStore'; import { history } from '../../../../redux/configureStore';
@ -22,11 +22,13 @@ import { GetTimeDiff } from '../../../../utils/timeDifferenceString';
import useStyles from '../styles'; import useStyles from '../styles';
interface TableScheduleWorkflow { interface TableScheduleWorkflow {
scheduleWorkflowList: Workflow[] | undefined; scheduleWorkflowList: ScheduledWorkflow[];
totalNoOfWorkflows: number;
} }
const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({ const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
scheduleWorkflowList, scheduleWorkflowList,
totalNoOfWorkflows,
}) => { }) => {
const classes = useStyles(); const classes = useStyles();
const { t } = useTranslation(); const { t } = useTranslation();
@ -37,13 +39,13 @@ const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
return ( return (
<div> <div>
{scheduleWorkflowList && scheduleWorkflowList.length > 0 ? ( {scheduleWorkflowList.length > 0 ? (
<Paper className={classes.dataTable}> <Paper className={classes.dataTable}>
<div className={classes.tableHeading}> <div className={classes.tableHeading}>
<Typography variant="h4" className={classes.weightedHeading}> <Typography variant="h4" className={classes.weightedHeading}>
{t('analyticsDashboard.workflowScheduleTable.title')} {t('analyticsDashboard.workflowScheduleTable.title')}
</Typography> </Typography>
{scheduleWorkflowList.length > 3 ? ( {totalNoOfWorkflows > 3 ? (
<IconButton <IconButton
className={classes.seeAllArrowBtn} className={classes.seeAllArrowBtn}
onClick={() => { onClick={() => {
@ -65,7 +67,7 @@ const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
</div> </div>
<Table className={classes.tableStyling}> <Table className={classes.tableStyling}>
<TableBody> <TableBody>
{scheduleWorkflowList.slice(0, 3).map((schedule) => ( {scheduleWorkflowList.map((schedule) => (
<TableRow <TableRow
key={schedule.workflow_id} key={schedule.workflow_id}
className={classes.tableRow} className={classes.tableRow}


@ -1,7 +1,3 @@
/* eslint-disable prefer-destructuring */
/* eslint-disable no-unused-expressions */
/* eslint-disable no-return-assign */
import { useQuery } from '@apollo/client'; import { useQuery } from '@apollo/client';
import React from 'react'; import React from 'react';
import { LocalQuickActionCard } from '../../../components/LocalQuickActionCard'; import { LocalQuickActionCard } from '../../../components/LocalQuickActionCard';
@ -12,7 +8,6 @@ import {
} from '../../../graphql/queries'; } from '../../../graphql/queries';
import { import {
DashboardList, DashboardList,
ListDashboardResponse,
ListDashboardVars, ListDashboardVars,
} from '../../../models/graphql/dashboardsDetails'; } from '../../../models/graphql/dashboardsDetails';
import { import {
@ -21,9 +16,8 @@ import {
ListDataSourceVars, ListDataSourceVars,
} from '../../../models/graphql/dataSourceDetails'; } from '../../../models/graphql/dataSourceDetails';
import { import {
Workflow, ListWorkflowsInput,
WorkflowList, ScheduledWorkflows,
WorkflowListDataVars,
} from '../../../models/graphql/workflowListData'; } from '../../../models/graphql/workflowListData';
import { getProjectID } from '../../../utils/getSearchParams'; import { getProjectID } from '../../../utils/getSearchParams';
import { sortNumAsc } from '../../../utils/sort'; import { sortNumAsc } from '../../../utils/sort';
@ -40,25 +34,26 @@ const Overview: React.FC = () => {
const projectID = getProjectID(); const projectID = getProjectID();
// Apollo query to get the scheduled workflow data // Apollo query to get the scheduled workflow data
const { data: schedulesData } = useQuery<WorkflowList, WorkflowListDataVars>( const { data: schedulesData } = useQuery<
WORKFLOW_LIST_DETAILS, ScheduledWorkflows,
{ ListWorkflowsInput
variables: { >(WORKFLOW_LIST_DETAILS, {
projectID, variables: {
workflowIDs: [], workflowInput: {
project_id: projectID,
pagination: {
page: 0,
limit: 3,
},
}, },
fetchPolicy: 'cache-and-network', },
pollInterval: 10000, fetchPolicy: 'cache-and-network',
} pollInterval: 10000,
); });
const filteredScheduleData = schedulesData?.ListWorkflow.slice().sort( const filteredScheduleData = schedulesData?.ListWorkflow.workflows;
(a: Workflow, b: Workflow) => { const totalScheduledWorkflows =
const x = parseInt(a.updated_at, 10); schedulesData?.ListWorkflow.totalNoOfWorkflows;
const y = parseInt(b.updated_at, 10);
return sortNumAsc(y, x);
}
);
// Apollo query to get the dashboard data // Apollo query to get the dashboard data
const { data: dashboardsList } = useQuery<DashboardList, ListDashboardVars>( const { data: dashboardsList } = useQuery<DashboardList, ListDashboardVars>(
@ -73,13 +68,11 @@ const Overview: React.FC = () => {
); );
const filteredDashboardData = dashboardsList?.ListDashboard const filteredDashboardData = dashboardsList?.ListDashboard
? dashboardsList?.ListDashboard.slice().sort( ? dashboardsList?.ListDashboard.slice().sort((a, b) => {
(a: ListDashboardResponse, b: ListDashboardResponse) => { const x = parseInt(a.updated_at, 10);
const x = parseInt(a.updated_at, 10); const y = parseInt(b.updated_at, 10);
const y = parseInt(b.updated_at, 10); return sortNumAsc(y, x);
return sortNumAsc(y, x); })
}
)
: []; : [];
// Query for dataSource // Query for dataSource
const { data } = useQuery<DataSourceList, ListDataSourceVars>( const { data } = useQuery<DataSourceList, ListDataSourceVars>(
@ -117,7 +110,10 @@ const Overview: React.FC = () => {
))} ))}
<TableDataSource dataSourceList={filteredDataSourceData} /> <TableDataSource dataSourceList={filteredDataSourceData} />
<TableDashboardData dashboardDataList={filteredDashboardData} /> <TableDashboardData dashboardDataList={filteredDashboardData} />
<TableScheduleWorkflow scheduleWorkflowList={filteredScheduleData} /> <TableScheduleWorkflow
scheduleWorkflowList={filteredScheduleData ?? []}
totalNoOfWorkflows={totalScheduledWorkflows ?? 0}
/>
{((filteredScheduleData && filteredScheduleData.length === 0) || {((filteredScheduleData && filteredScheduleData.length === 0) ||
!filteredScheduleData) && ( !filteredScheduleData) && (
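
Instead of sorting and slicing the full schedule list on the client, the overview now requests only the first three schedules and uses the separately returned total to decide whether the "see all" control is needed. A minimal sketch of deriving the table props from such a response; the helper is hypothetical and assumes the paginated query above (limit 3).

```ts
import { ScheduledWorkflows } from '../../../models/graphql/workflowListData';

// Hypothetical helper: map a paginated response to the table's props.
const toTableProps = (schedulesData?: ScheduledWorkflows) => ({
  scheduleWorkflowList: schedulesData?.ListWorkflow.workflows ?? [],
  // The backend reports the overall count even though only three rows were
  // fetched, so "see all" can still be shown when more schedules exist.
  totalNoOfWorkflows: schedulesData?.ListWorkflow.totalNoOfWorkflows ?? 0,
});
```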


@ -5,7 +5,7 @@ import moment from 'moment';
import React from 'react'; import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import { CheckBox } from '../../../../components/CheckBox'; import { CheckBox } from '../../../../components/CheckBox';
import { Workflow } from '../../../../models/graphql/workflowListData'; import { ScheduledWorkflow } from '../../../../models/graphql/workflowListData';
import { history } from '../../../../redux/configureStore'; import { history } from '../../../../redux/configureStore';
import { import {
getProjectID, getProjectID,
@ -14,7 +14,7 @@ import {
import useStyles, { StyledTableCell } from './styles'; import useStyles, { StyledTableCell } from './styles';
interface TableDataProps { interface TableDataProps {
data: Workflow; data: ScheduledWorkflow;
itemSelectionStatus: boolean; itemSelectionStatus: boolean;
labelIdentifier: string; labelIdentifier: string;
comparisonState: Boolean; comparisonState: Boolean;


@ -25,10 +25,10 @@ import Loader from '../../../../components/Loader';
import { WORKFLOW_LIST_DETAILS } from '../../../../graphql/queries'; import { WORKFLOW_LIST_DETAILS } from '../../../../graphql/queries';
import { import {
ExecutionData, ExecutionData,
ListWorkflowsInput,
ScheduledWorkflow,
ScheduledWorkflows,
WeightageMap, WeightageMap,
Workflow,
WorkflowList,
WorkflowListDataVars,
} from '../../../../models/graphql/workflowListData'; } from '../../../../models/graphql/workflowListData';
import { getProjectID } from '../../../../utils/getSearchParams'; import { getProjectID } from '../../../../utils/getSearchParams';
import { import {
@ -104,7 +104,7 @@ const WorkflowComparisonTable = () => {
}, },
searchTokens: [''], searchTokens: [''],
}); });
const [displayData, setDisplayData] = useState<Workflow[]>([]); const [displayData, setDisplayData] = useState<ScheduledWorkflow[]>([]);
const [clusters, setClusters] = React.useState<string[]>([]); const [clusters, setClusters] = React.useState<string[]>([]);
const [page, setPage] = React.useState(0); const [page, setPage] = React.useState(0);
const [rowsPerPage, setRowsPerPage] = React.useState(5); const [rowsPerPage, setRowsPerPage] = React.useState(5);
@ -127,15 +127,15 @@ const WorkflowComparisonTable = () => {
const projectID = getProjectID(); const projectID = getProjectID();
// Apollo query to get the scheduled workflow data // Apollo query to get the scheduled workflow data
const { data, loading, error } = useQuery<WorkflowList, WorkflowListDataVars>( const { data, loading, error } = useQuery<
WORKFLOW_LIST_DETAILS, ScheduledWorkflows,
{ ListWorkflowsInput
variables: { projectID, workflowIDs: [] }, >(WORKFLOW_LIST_DETAILS, {
fetchPolicy: 'cache-and-network', variables: { workflowInput: { project_id: projectID } },
} fetchPolicy: 'cache-and-network',
); });
const getClusters = (searchingData: Workflow[]) => { const getClusters = (searchingData: ScheduledWorkflow[]) => {
const uniqueList: string[] = []; const uniqueList: string[] = [];
searchingData.forEach((data) => { searchingData.forEach((data) => {
if (!uniqueList.includes(data.cluster_name)) { if (!uniqueList.includes(data.cluster_name)) {
@ -158,7 +158,9 @@ const WorkflowComparisonTable = () => {
const handleSelectAllClick = (event: React.ChangeEvent<HTMLInputElement>) => { const handleSelectAllClick = (event: React.ChangeEvent<HTMLInputElement>) => {
if (event.target.checked) { if (event.target.checked) {
const newSelecteds = displayData.map((n: Workflow) => n.workflow_id); const newSelecteds = displayData.map(
(n: ScheduledWorkflow) => n.workflow_id
);
setSelected(newSelecteds); setSelected(newSelecteds);
return; return;
} }
@ -185,13 +187,13 @@ const WorkflowComparisonTable = () => {
}; };
const searchingDataRetriever = () => { const searchingDataRetriever = () => {
let searchingData: Workflow[] = []; let searchingData: ScheduledWorkflow[] = [];
if (compare === false) { if (compare === false) {
searchingData = data?.ListWorkflow ?? []; searchingData = data?.ListWorkflow.workflows ?? [];
} else { } else {
const searchedData: Workflow[] = []; const searchedData: ScheduledWorkflow[] = [];
selected.forEach((workflowID) => { selected.forEach((workflowID) => {
data?.ListWorkflow.forEach((workflow) => { data?.ListWorkflow.workflows.forEach((workflow) => {
if (workflow.workflow_id === workflowID) { if (workflow.workflow_id === workflowID) {
searchedData.push(workflow); searchedData.push(workflow);
} }
@ -230,14 +232,16 @@ const WorkflowComparisonTable = () => {
const totalValidWorkflowRuns: WorkflowDataForExport[] = []; const totalValidWorkflowRuns: WorkflowDataForExport[] = [];
const timeSeriesArray: DatedResilienceScore[][] = []; const timeSeriesArray: DatedResilienceScore[][] = [];
selected.forEach((workflow) => { selected.forEach((workflow) => {
const workflowData = data?.ListWorkflow.filter(function match(wkf) { const workflowData = data?.ListWorkflow.workflows.filter(function match(
wkf
) {
return wkf.workflow_id === workflow; return wkf.workflow_id === workflow;
}); });
const runs = workflowData ? workflowData[0].workflow_runs : []; const runs = workflowData ? workflowData[0].workflow_runs : [];
const workflowTimeSeriesData: DatedResilienceScore[] = []; const workflowTimeSeriesData: DatedResilienceScore[] = [];
let isWorkflowValid: boolean = false; let isWorkflowValid: boolean = false;
try { try {
runs.forEach((data) => { runs?.forEach((data) => {
try { try {
const executionData: ExecutionData = JSON.parse( const executionData: ExecutionData = JSON.parse(
data.execution_data data.execution_data
@ -416,11 +420,11 @@ const WorkflowComparisonTable = () => {
const CallbackForComparing = (compareWorkflows: boolean) => { const CallbackForComparing = (compareWorkflows: boolean) => {
setCompare(compareWorkflows); setCompare(compareWorkflows);
const payload: Workflow[] = []; const payload: ScheduledWorkflow[] = [];
selected.forEach((workflow) => { selected.forEach((workflow) => {
displayData.forEach((displayWorkflow, i) => { displayData.forEach((displayWorkflow, i) => {
if (displayWorkflow.workflow_id === workflow && data) { if (displayWorkflow.workflow_id === workflow && data) {
payload.push(data.ListWorkflow[i]); payload.push(data?.ListWorkflow.workflows[i]);
} }
}); });
}); });
@ -554,13 +558,13 @@ const WorkflowComparisonTable = () => {
}; };
useEffect(() => { useEffect(() => {
setDisplayData(data ? data.ListWorkflow : []); setDisplayData(data ? data.ListWorkflow.workflows : []);
getClusters(data ? data.ListWorkflow : []); getClusters(data ? data.ListWorkflow.workflows : []);
}, [data]); }, [data]);
useEffect(() => { useEffect(() => {
const payload = searchingDataRetriever() const payload = searchingDataRetriever()
.filter((wkf: Workflow) => { .filter((wkf) => {
return filter.searchTokens.every( return filter.searchTokens.every(
(s: string) => (s: string) =>
wkf.workflow_name.toLowerCase().includes(s) || wkf.workflow_name.toLowerCase().includes(s) ||
@ -589,7 +593,7 @@ const WorkflowComparisonTable = () => {
) )
).getTime(); ).getTime();
}) })
.sort((a: Workflow, b: Workflow) => { .sort((a, b) => {
// Sorting based on unique fields // Sorting based on unique fields
if (filter.sortData.name.sort) { if (filter.sortData.name.sort) {
const x = a.workflow_name; const x = a.workflow_name;
@ -752,7 +756,7 @@ const WorkflowComparisonTable = () => {
page * rowsPerPage, page * rowsPerPage,
page * rowsPerPage + rowsPerPage page * rowsPerPage + rowsPerPage
) )
.map((data: Workflow, index: number) => { .map((data, index) => {
const isItemSelected = isSelected(data.workflow_id); const isItemSelected = isSelected(data.workflow_id);
const labelId = `enhanced-table-checkbox-${index}`; const labelId = `enhanced-table-checkbox-${index}`;
return ( return (
@ -799,7 +803,7 @@ const WorkflowComparisonTable = () => {
</Table> </Table>
</TableContainer> </TableContainer>
{/* </MuiThemeProvider> */} {/* </MuiThemeProvider> */}
{compare === false || showAll === true ? ( {!compare || showAll ? (
<TablePagination <TablePagination
rowsPerPageOptions={[5, 10, 25, 50]} rowsPerPageOptions={[5, 10, 25, 50]}
component="div" component="div"


@ -22,7 +22,7 @@ import React from 'react';
import { useTranslation } from 'react-i18next'; import { useTranslation } from 'react-i18next';
import YAML from 'yaml'; import YAML from 'yaml';
import { RERUN_CHAOS_WORKFLOW } from '../../../graphql/mutations'; import { RERUN_CHAOS_WORKFLOW } from '../../../graphql/mutations';
import { ScheduleWorkflow } from '../../../models/graphql/scheduleData'; import { ScheduledWorkflow } from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions'; import useActions from '../../../redux/actions';
import * as TabActions from '../../../redux/actions/tabs'; import * as TabActions from '../../../redux/actions/tabs';
import * as WorkflowActions from '../../../redux/actions/workflow'; import * as WorkflowActions from '../../../redux/actions/workflow';
@ -35,9 +35,9 @@ import SaveTemplateModal from './SaveTemplateModal';
import useStyles from './styles'; import useStyles from './styles';
interface TableDataProps { interface TableDataProps {
data: ScheduleWorkflow; data: ScheduledWorkflow;
deleteRow: (wfid: string) => void; deleteRow: (wfid: string) => void;
handleToggleSchedule: (schedule: ScheduleWorkflow) => void; handleToggleSchedule: (schedule: ScheduledWorkflow) => void;
} }
const TableData: React.FC<TableDataProps> = ({ const TableData: React.FC<TableDataProps> = ({


@ -26,38 +26,27 @@ import YAML from 'yaml';
import Loader from '../../../components/Loader'; import Loader from '../../../components/Loader';
import { import {
DELETE_WORKFLOW, DELETE_WORKFLOW,
SCHEDULE_DETAILS, GET_CLUSTER_NAMES,
UPDATE_SCHEDULE, UPDATE_SCHEDULE,
WORKFLOW_LIST_DETAILS,
} from '../../../graphql'; } from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import { WeightMap } from '../../../models/graphql/createWorkflowData'; import { WeightMap } from '../../../models/graphql/createWorkflowData';
import { DeleteSchedule } from '../../../models/graphql/scheduleData';
import { import {
DeleteSchedule, ListWorkflowsInput,
ScheduleDataVars, Pagination,
Schedules, ScheduledWorkflow,
ScheduleWorkflow, ScheduledWorkflows,
} from '../../../models/graphql/scheduleData'; SortInput,
WorkflowFilterInput,
} from '../../../models/graphql/workflowListData';
import { getProjectID } from '../../../utils/getSearchParams'; import { getProjectID } from '../../../utils/getSearchParams';
import {
sortAlphaAsc,
sortAlphaDesc,
sortNumAsc,
sortNumDesc,
} from '../../../utils/sort';
import useStyles from './styles'; import useStyles from './styles';
import TableData from './TableData'; import TableData from './TableData';
interface FilterOption { interface FilterOption extends WorkflowFilterInput {
search: string; suspended?: string;
cluster: string;
suspended: string;
}
interface PaginationData {
pageNo: number;
rowsPerPage: number;
}
interface SortData {
startDate: { sort: boolean; ascending: boolean };
name: { sort: boolean; ascending: boolean };
} }
const BrowseSchedule: React.FC = () => { const BrowseSchedule: React.FC = () => {
@ -65,33 +54,60 @@ const BrowseSchedule: React.FC = () => {
const projectID = getProjectID(); const projectID = getProjectID();
const { t } = useTranslation(); const { t } = useTranslation();
// Apollo query to get the scheduled data // State for pagination
const { data, loading, error } = useQuery<Schedules, ScheduleDataVars>( const [paginationData, setPaginationData] = useState<Pagination>({
SCHEDULE_DETAILS, page: 0,
{ limit: 10,
variables: { projectID },
fetchPolicy: 'cache-and-network',
}
);
// Apollo mutation to delete the selected schedule
const [deleteSchedule] = useMutation<DeleteSchedule>(DELETE_WORKFLOW, {
refetchQueries: [{ query: SCHEDULE_DETAILS, variables: { projectID } }],
}); });
// State for search and filtering // States for filters
const [filter, setFilter] = React.useState<FilterOption>({ const [filters, setFilters] = useState<FilterOption>({
search: '', workflow_name: '',
cluster: 'All', cluster_name: 'All',
suspended: 'All', suspended: 'All',
}); });
// State for sorting
const [sortData, setSortData] = useState<SortInput>({
field: 'Name',
descending: true,
});
// Apollo query to get the scheduled data
const { data, refetch, loading, error } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
pagination: {
page: paginationData.page,
limit: paginationData.limit,
},
sort: sortData,
filter: {
workflow_name: filters.workflow_name,
cluster_name: filters.cluster_name,
},
},
},
fetchPolicy: 'cache-and-network',
});
// Apollo mutation to delete the selected schedule
const [deleteSchedule] = useMutation<DeleteSchedule>(DELETE_WORKFLOW, {
onCompleted: () => refetch(),
});
// State for search and filtering
const [updateSchedule] = useMutation(UPDATE_SCHEDULE, { const [updateSchedule] = useMutation(UPDATE_SCHEDULE, {
refetchQueries: [{ query: SCHEDULE_DETAILS, variables: { projectID } }], onCompleted: () => refetch(),
}); });
// Disable and re-enable a schedule // Disable and re-enable a schedule
const handleToggleSchedule = (schedule: ScheduleWorkflow) => { const handleToggleSchedule = (schedule: ScheduledWorkflow) => {
const yaml = YAML.parse(schedule.workflow_manifest); const yaml = YAML.parse(schedule.workflow_manifest);
if (yaml.spec.suspend === undefined || yaml.spec.suspend === false) { if (yaml.spec.suspend === undefined || yaml.spec.suspend === false) {
yaml.spec.suspend = true; yaml.spec.suspend = true;
@ -125,66 +141,25 @@ const BrowseSchedule: React.FC = () => {
}); });
}; };
// State for pagination // Query to get list of Clusters
const [paginationData, setPaginationData] = useState<PaginationData>({ const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
pageNo: 0, GET_CLUSTER_NAMES,
rowsPerPage: 5, {
}); variables: {
project_id: projectID,
},
}
);
// State for sorting const filteredWorkflows = data?.ListWorkflow.workflows.filter((dataRow) =>
const [sortData, setSortData] = useState<SortData>({ filters.suspended === 'All'
name: { sort: false, ascending: true }, ? true
startDate: { sort: true, ascending: true }, : filters.suspended === 'true'
}); ? YAML.parse(dataRow.workflow_manifest).spec.suspend === true
: filters.suspended === 'false'
const getClusters = (searchingData: ScheduleWorkflow[]) => { ? YAML.parse(dataRow.workflow_manifest).spec.suspend === undefined
const uniqueList: string[] = []; : false
searchingData.forEach((data) => { );
if (!uniqueList.includes(data.cluster_name)) {
uniqueList.push(data.cluster_name);
}
});
return uniqueList;
};
const filteredData = data?.getScheduledWorkflows
.filter((dataRow) =>
dataRow.workflow_name.toLowerCase().includes(filter.search.toLowerCase())
)
.filter((dataRow) =>
filter.cluster === 'All'
? true
: dataRow.cluster_name
.toLowerCase()
.includes(filter.cluster.toLowerCase())
)
.filter((dataRow) =>
filter.suspended === 'All'
? true
: filter.suspended === 'true'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === true
: filter.suspended === 'false'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === undefined
: false
)
.sort((a: ScheduleWorkflow, b: ScheduleWorkflow) => {
// Sorting based on unique fields
if (sortData.name.sort) {
const x = a.workflow_name;
const y = b.workflow_name;
return sortData.name.ascending
? sortAlphaAsc(x, y)
: sortAlphaDesc(x, y);
}
if (sortData.startDate.sort) {
const x = parseInt(a.updated_at, 10);
const y = parseInt(b.updated_at, 10);
return sortData.startDate.ascending
? sortNumAsc(y, x)
: sortNumDesc(y, x);
}
return 0;
});
const deleteRow = (wfid: string) => { const deleteRow = (wfid: string) => {
deleteSchedule({ deleteSchedule({
@ -200,9 +175,12 @@ const BrowseSchedule: React.FC = () => {
id="input-with-icon-adornment" id="input-with-icon-adornment"
placeholder="Search" placeholder="Search"
className={classes.search} className={classes.search}
value={filter.search} value={filters.workflow_name}
onChange={(event) => onChange={(event) =>
setFilter({ ...filter, search: event.target.value as string }) setFilters({
...filters,
workflow_name: event.target.value as string,
})
} }
startAdornment={ startAdornment={
<InputAdornment position="start"> <InputAdornment position="start">
@ -218,10 +196,10 @@ const BrowseSchedule: React.FC = () => {
> >
<InputLabel className={classes.selectText}>Name</InputLabel> <InputLabel className={classes.selectText}>Name</InputLabel>
<Select <Select
value={filter.suspended} value={filters.suspended}
onChange={(event) => onChange={(event) =>
setFilter({ setFilters({
...filter, ...filters,
suspended: event.target.value as string, suspended: event.target.value as string,
}) })
} }
@ -249,19 +227,25 @@ const BrowseSchedule: React.FC = () => {
> >
<InputLabel className={classes.selectText}>Target Agent</InputLabel> <InputLabel className={classes.selectText}>Target Agent</InputLabel>
<Select <Select
value={filter.cluster} value={filters.cluster_name}
onChange={(event) => onChange={(event) =>
setFilter({ ...filter, cluster: event.target.value as string }) setFilters({
...filters,
cluster_name: event.target.value as string,
})
} }
label="Target Cluster" label="Target Cluster"
className={classes.selectText} className={classes.selectText}
> >
<MenuItem value="All">All</MenuItem> <MenuItem value="All">All</MenuItem>
{(data ? getClusters(data.getScheduledWorkflows) : []).map( {clusterList?.getCluster?.map((cluster) => (
(cluster: any) => ( <MenuItem
<MenuItem value={cluster}>{cluster}</MenuItem> key={cluster.cluster_name}
) value={cluster.cluster_name}
)} >
{cluster.cluster_name}
</MenuItem>
))}
</Select> </Select>
</FormControl> </FormControl>
</div> </div>
@ -287,9 +271,8 @@ const BrowseSchedule: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Name',
name: { sort: false, ascending: false }, descending: false,
startDate: { sort: false, ascending: false },
}) })
} }
> >
@ -300,9 +283,8 @@ const BrowseSchedule: React.FC = () => {
size="small" size="small"
onClick={() => onClick={() =>
setSortData({ setSortData({
...sortData, field: 'Name',
name: { sort: false, ascending: true }, descending: true,
startDate: { sort: true, ascending: true },
}) })
} }
> >
@ -356,25 +338,19 @@ const BrowseSchedule: React.FC = () => {
<Typography align="center">Unable to fetch data</Typography> <Typography align="center">Unable to fetch data</Typography>
</TableCell> </TableCell>
</TableRow> </TableRow>
) : filteredData && filteredData.length ? ( ) : filteredWorkflows && filteredWorkflows.length ? (
filteredData filteredWorkflows.map((data) => (
.slice( <TableRow
paginationData.pageNo * paginationData.rowsPerPage, data-cy="workflowSchedulesTableRow"
paginationData.pageNo * paginationData.rowsPerPage + key={data.workflow_id}
paginationData.rowsPerPage >
) <TableData
.map((data: ScheduleWorkflow) => ( data={data}
<TableRow deleteRow={deleteRow}
data-cy="workflowSchedulesTableRow" handleToggleSchedule={handleToggleSchedule}
key={data.workflow_id} />
> </TableRow>
<TableData ))
data={data}
deleteRow={deleteRow}
handleToggleSchedule={handleToggleSchedule}
/>
</TableRow>
))
) : ( ) : (
<TableRow> <TableRow>
<TableCell data-cy="browseScheduleNoData" colSpan={7}> <TableCell data-cy="browseScheduleNoData" colSpan={7}>
@ -388,19 +364,19 @@ const BrowseSchedule: React.FC = () => {
{/* Pagination Section */} {/* Pagination Section */}
<TablePagination <TablePagination
rowsPerPageOptions={[5, 10, 25]} rowsPerPageOptions={[10, 25, 50]}
component="div" component="div"
count={filteredData?.length ?? 0} count={filteredWorkflows?.length ?? 0}
rowsPerPage={paginationData.rowsPerPage} rowsPerPage={paginationData.limit}
page={paginationData.pageNo} page={paginationData.page}
onChangePage={(_, page) => onChangePage={(_, page) =>
setPaginationData({ ...paginationData, pageNo: page }) setPaginationData({ ...paginationData, page })
} }
onChangeRowsPerPage={(event) => { onChangeRowsPerPage={(event) => {
setPaginationData({ setPaginationData({
...paginationData, ...paginationData,
pageNo: 0, page: 0,
rowsPerPage: parseInt(event.target.value, 10), limit: parseInt(event.target.value, 10),
}); });
}} }}
/> />
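
Name search, agent selection, sorting, and pagination are now resolved by the backend through `workflowInput`, while the suspended filter still runs client-side because suspension lives inside the workflow manifest rather than in a queryable field. A sketch of that predicate on its own, assuming the `yaml` package already imported in this file; the helper name is hypothetical.

```ts
import YAML from 'yaml';
import { ScheduledWorkflow } from '../../../models/graphql/workflowListData';

// Hypothetical extraction of the suspended check applied to each row.
const matchesSuspended = (
  row: ScheduledWorkflow,
  suspended: string
): boolean => {
  if (suspended === 'All') return true;
  const suspendFlag = YAML.parse(row.workflow_manifest).spec.suspend;
  // 'true' keeps suspended schedules; 'false' keeps schedules whose manifest
  // never sets the flag, mirroring how handleToggleSchedule writes it.
  if (suspended === 'true') return suspendFlag === true;
  if (suspended === 'false') return suspendFlag === undefined;
  return false;
};
```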


@ -22,8 +22,8 @@ import {
} from '../../../graphql'; } from '../../../graphql';
import { WorkflowRun } from '../../../models/graphql/workflowData'; import { WorkflowRun } from '../../../models/graphql/workflowData';
import { import {
WorkflowList, ListWorkflowsInput,
WorkflowListDataVars, ScheduledWorkflows,
} from '../../../models/graphql/workflowListData'; } from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions'; import useActions from '../../../redux/actions';
import * as NodeSelectionActions from '../../../redux/actions/nodeSelection'; import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
@ -62,12 +62,14 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
}; };
const { data: scheduledWorkflowData } = useQuery< const { data: scheduledWorkflowData } = useQuery<
WorkflowList, ScheduledWorkflows,
WorkflowListDataVars ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, { >(WORKFLOW_LIST_DETAILS, {
variables: { variables: {
projectID, workflowInput: {
workflowIDs: [data.workflow_id as string], project_id: projectID,
workflow_ids: [data.workflow_id ?? ''],
},
}, },
}); });
@ -298,7 +300,11 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
> >
<Typography className={classes.boldText}> <Typography className={classes.boldText}>
{t('chaosWorkflows.browseWorkflows.tableData.showExperiments')}( {t('chaosWorkflows.browseWorkflows.tableData.showExperiments')}(
{scheduledWorkflowData?.ListWorkflow[0].weightages.length}) {
scheduledWorkflowData?.ListWorkflow.workflows[0].weightages
.length
}
)
</Typography> </Typography>
<div className={classes.experimentDetails}> <div className={classes.experimentDetails}>
{isOpen ? ( {isOpen ? (
@ -323,7 +329,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
}} }}
> >
<div className={classes.popover}> <div className={classes.popover}>
{scheduledWorkflowData?.ListWorkflow[0].weightages.map( {scheduledWorkflowData?.ListWorkflow.workflows[0].weightages.map(
(weightEntry) => ( (weightEntry) => (
<div <div
key={weightEntry.experiment_name} key={weightEntry.experiment_name}


@ -136,11 +136,13 @@ const useStyles = makeStyles((theme) => ({
flexDirection: 'row', flexDirection: 'row',
cursor: 'pointer', cursor: 'pointer',
}, },
btnImg: { btnImg: {
width: '0.8125rem', width: '0.8125rem',
height: '0.8125rem', height: '0.8125rem',
marginTop: theme.spacing(0.375), marginTop: theme.spacing(0.375),
}, },
btnText: { btnText: {
paddingLeft: theme.spacing(1.625), paddingLeft: theme.spacing(1.625),
}, },


@ -28,8 +28,8 @@ const useStyles = makeStyles((theme) => ({
backgroundColor: theme.palette.warning.light, backgroundColor: theme.palette.warning.light,
}, },
failed: { failed: {
color: theme.palette.error.main, color: theme.palette.status.failed,
backgroundColor: theme.palette.error.light, backgroundColor: theme.palette.status.failed,
}, },
statusFont: { statusFont: {
fontSize: '0.725rem', fontSize: '0.725rem',


@ -397,7 +397,7 @@ const VerifyCommit = forwardRef(
fullWidth fullWidth
multiline multiline
error={checkNameValidation()} error={checkNameValidation()}
onSave={(value) => onSave={(value: any) =>
handleNameChange({ changedName: value }) handleNameChange({ changedName: value })
} }
helperText={ helperText={
@ -436,7 +436,7 @@ const VerifyCommit = forwardRef(
id="desc" id="desc"
fullWidth fullWidth
multiline multiline
onSave={(value) => onSave={(value: any) =>
handleDescChange({ changedDesc: value }) handleDescChange({ changedDesc: value })
} }
/> />
@ -469,7 +469,7 @@ const VerifyCommit = forwardRef(
fullWidth fullWidth
multiline multiline
error={checkSubjectValidation()} error={checkSubjectValidation()}
onSave={(value) => onSave={(value: any) =>
handleSubjectChange({ changedSubject: value }) handleSubjectChange({ changedSubject: value })
} }
helperText={ helperText={


@ -27,6 +27,7 @@ require (
go.mongodb.org/mongo-driver v1.3.5 go.mongodb.org/mongo-driver v1.3.5
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect
golang.org/x/tools v0.0.0-20200428211428-0c9eba77bc32
gopkg.in/src-d/go-git.v4 v4.13.1 gopkg.in/src-d/go-git.v4 v4.13.1
gopkg.in/yaml.v2 v2.3.0 gopkg.in/yaml.v2 v2.3.0
k8s.io/apimachinery v0.18.6 k8s.io/apimachinery v0.18.6


@ -643,6 +643,8 @@ github.com/litmuschaos/chaos-operator v0.0.0-20210224131102-ca6a465ed348/go.mod
github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032 h1:Nza94oOqOsao8eFWC19iFviS8XsxS2eVk7Q0a9WDKBE= github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032 h1:Nza94oOqOsao8eFWC19iFviS8XsxS2eVk7Q0a9WDKBE=
github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032/go.mod h1:7EO6kbZKeJGKzkchgQepCxywvqNFNvNHW0G+u9923AY= github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032/go.mod h1:7EO6kbZKeJGKzkchgQepCxywvqNFNvNHW0G+u9923AY=
github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ= github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ=
github.com/litmuschaos/litmus v0.0.0-20210610061227-c0d001df3f33 h1:TdJzS++HpQWypGHPXyUGSQzN7K5eajy9/K34UQLVSBw=
github.com/litmuschaos/litmus v0.0.0-20210610070956-555e651c89ea h1:nWDzJZvpiJc37yKO456Cv9TRukS5PoeO/pSmRtxVb6A=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA= github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA=
github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04= github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04=


@ -186,6 +186,11 @@ type ComplexityRoot struct {
URL func(childComplexity int) int URL func(childComplexity int) int
} }
ListWorkflowsOutput struct {
TotalNoOfWorkflows func(childComplexity int) int
Workflows func(childComplexity int) int
}
Maintainer struct { Maintainer struct {
Email func(childComplexity int) int Email func(childComplexity int) int
Name func(childComplexity int) int Name func(childComplexity int) int
@ -340,7 +345,6 @@ type ComplexityRoot struct {
GetPromLabelNamesAndValues func(childComplexity int, series *model.PromSeriesInput) int GetPromLabelNamesAndValues func(childComplexity int, series *model.PromSeriesInput) int
GetPromQuery func(childComplexity int, query *model.PromInput) int GetPromQuery func(childComplexity int, query *model.PromInput) int
GetPromSeriesList func(childComplexity int, dsDetails *model.DsDetails) int GetPromSeriesList func(childComplexity int, dsDetails *model.DsDetails) int
GetScheduledWorkflows func(childComplexity int, projectID string) int
GetTemplateManifestByID func(childComplexity int, templateID string) int GetTemplateManifestByID func(childComplexity int, templateID string) int
GetUser func(childComplexity int, username string) int GetUser func(childComplexity int, username string) int
GetWorkflowRuns func(childComplexity int, workflowRunsInput model.GetWorkflowRunsInput) int GetWorkflowRuns func(childComplexity int, workflowRunsInput model.GetWorkflowRunsInput) int
@ -350,7 +354,7 @@ type ComplexityRoot struct {
ListImageRegistry func(childComplexity int, projectID string) int ListImageRegistry func(childComplexity int, projectID string) int
ListManifestTemplate func(childComplexity int, projectID string) int ListManifestTemplate func(childComplexity int, projectID string) int
ListProjects func(childComplexity int) int ListProjects func(childComplexity int) int
ListWorkflow func(childComplexity int, projectID string, workflowIds []*string) int ListWorkflow func(childComplexity int, workflowInput model.ListWorkflowsInput) int
Users func(childComplexity int) int Users func(childComplexity int) int
} }
@ -431,7 +435,6 @@ type ComplexityRoot struct {
WorkflowManifest func(childComplexity int) int WorkflowManifest func(childComplexity int) int
WorkflowName func(childComplexity int) int WorkflowName func(childComplexity int) int
WorkflowRuns func(childComplexity int) int WorkflowRuns func(childComplexity int) int
WorkflowType func(childComplexity int) int
} }
WorkflowRun struct { WorkflowRun struct {
@ -626,8 +629,7 @@ type QueryResolver interface {
GetProject(ctx context.Context, projectID string) (*model.Project, error) GetProject(ctx context.Context, projectID string) (*model.Project, error)
ListProjects(ctx context.Context) ([]*model.Project, error) ListProjects(ctx context.Context) ([]*model.Project, error)
Users(ctx context.Context) ([]*model.User, error) Users(ctx context.Context) ([]*model.User, error)
GetScheduledWorkflows(ctx context.Context, projectID string) ([]*model.ScheduledWorkflows, error) ListWorkflow(ctx context.Context, workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error)
ListWorkflow(ctx context.Context, projectID string, workflowIds []*string) ([]*model.Workflow, error)
GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error)
GetHubExperiment(ctx context.Context, experimentInput model.ExperimentInput) (*model.Chart, error) GetHubExperiment(ctx context.Context, experimentInput model.ExperimentInput) (*model.Chart, error)
GetHubStatus(ctx context.Context, projectID string) ([]*model.MyHubStatus, error) GetHubStatus(ctx context.Context, projectID string) ([]*model.MyHubStatus, error)
@ -1305,6 +1307,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Link.URL(childComplexity), true return e.complexity.Link.URL(childComplexity), true
case "ListWorkflowsOutput.total_no_of_workflows":
if e.complexity.ListWorkflowsOutput.TotalNoOfWorkflows == nil {
break
}
return e.complexity.ListWorkflowsOutput.TotalNoOfWorkflows(childComplexity), true
case "ListWorkflowsOutput.workflows":
if e.complexity.ListWorkflowsOutput.Workflows == nil {
break
}
return e.complexity.ListWorkflowsOutput.Workflows(childComplexity), true
case "Maintainer.Email": case "Maintainer.Email":
if e.complexity.Maintainer.Email == nil { if e.complexity.Maintainer.Email == nil {
break break
@ -2415,18 +2431,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.GetPromSeriesList(childComplexity, args["ds_details"].(*model.DsDetails)), true return e.complexity.Query.GetPromSeriesList(childComplexity, args["ds_details"].(*model.DsDetails)), true
case "Query.getScheduledWorkflows":
if e.complexity.Query.GetScheduledWorkflows == nil {
break
}
args, err := ec.field_Query_getScheduledWorkflows_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.GetScheduledWorkflows(childComplexity, args["project_id"].(string)), true
case "Query.GetTemplateManifestByID": case "Query.GetTemplateManifestByID":
if e.complexity.Query.GetTemplateManifestByID == nil { if e.complexity.Query.GetTemplateManifestByID == nil {
break break
@ -2540,7 +2544,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return 0, false return 0, false
} }
return e.complexity.Query.ListWorkflow(childComplexity, args["project_id"].(string), args["workflow_ids"].([]*string)), true return e.complexity.Query.ListWorkflow(childComplexity, args["workflowInput"].(model.ListWorkflowsInput)), true
case "Query.users": case "Query.users":
if e.complexity.Query.Users == nil { if e.complexity.Query.Users == nil {
@ -3001,13 +3005,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Workflow.WorkflowRuns(childComplexity), true return e.complexity.Workflow.WorkflowRuns(childComplexity), true
case "Workflow.workflow_type":
if e.complexity.Workflow.WorkflowType == nil {
break
}
return e.complexity.Workflow.WorkflowType(childComplexity), true
case "WorkflowRun.cluster_id": case "WorkflowRun.cluster_id":
if e.complexity.WorkflowRun.ClusterID == nil { if e.complexity.WorkflowRun.ClusterID == nil {
break break
@ -4269,7 +4266,7 @@ input PodLogRequest {
} }
type ScheduledWorkflows { type ScheduledWorkflows {
workflow_type:String! workflow_type: String!
workflow_id: String! workflow_id: String!
workflow_manifest: String! workflow_manifest: String!
cronSyntax: String! cronSyntax: String!
@ -4286,25 +4283,6 @@ type ScheduledWorkflows {
isRemoved: Boolean! isRemoved: Boolean!
} }
type Workflow {
workflow_type:String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type WorkflowRuns { type WorkflowRuns {
execution_data: String! execution_data: String!
workflow_run_id: ID! workflow_run_id: ID!
@ -4404,10 +4382,8 @@ type Query {
users: [User!]! @authorized users: [User!]! @authorized
# [Deprecated soon] ListWorkflow(workflowInput: ListWorkflowsInput!): ListWorkflowsOutput!
getScheduledWorkflows(project_id: String!): [ScheduledWorkflows]! @authorized @authorized
ListWorkflow(project_id: String!, workflow_ids: [ID]): [Workflow]! @authorized
getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized
@ -4425,7 +4401,8 @@ type Query {
GetPromQuery(query: promInput): promResponse! @authorized GetPromQuery(query: promInput): promResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse! @authorized GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse!
@authorized
GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized
@ -4461,13 +4438,16 @@ type Mutation {
## Workflow APIs ## Workflow APIs
# It is used to create chaosworkflow # It is used to create chaosworkflow
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse!
@authorized
reRunChaosWorkFlow(workflowID: String!): String! @authorized reRunChaosWorkFlow(workflowID: String!): String! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean! @authorized deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean!
@authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean! @authorized syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean!
@authorized
#Used for sending invitation #Used for sending invitation
sendInvitation(member: MemberInput!): Member @authorized sendInvitation(member: MemberInput!): Member @authorized
@ -4485,7 +4465,8 @@ type Mutation {
leaveProject(member: MemberInput!): String! @authorized leaveProject(member: MemberInput!): String! @authorized
#Used to update project name #Used to update project name
updateProjectName(projectID: String!, projectName: String!): String! @authorized updateProjectName(projectID: String!, projectName: String!): String!
@authorized
#It is used to confirm the subscriber registration #It is used to confirm the subscriber registration
clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse! clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse!
@ -4505,7 +4486,8 @@ type Mutation {
syncHub(id: ID!): [MyHubStatus!]! @authorized syncHub(id: ID!): [MyHubStatus!]! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse! @authorized updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse!
@authorized
deleteClusterReg(cluster_id: String!): String! @authorized deleteClusterReg(cluster_id: String!): String! @authorized
@ -4540,7 +4522,8 @@ type Mutation {
deleteDataSource(input: deleteDSInput!): Boolean! @authorized deleteDataSource(input: deleteDSInput!): Boolean! @authorized
# Manifest Template # Manifest Template
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate! @authorized createManifestTemplate(templateInput: TemplateInput): ManifestTemplate!
@authorized
deleteManifestTemplate(template_id: String!): Boolean! @authorized deleteManifestTemplate(template_id: String!): Boolean! @authorized
@ -4556,7 +4539,8 @@ type Mutation {
imageRegistryInfo: imageRegistryInput! imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized ): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized deleteImageRegistry(image_registry_id: String!, project_id: String!): String!
@authorized
} }
type Subscription { type Subscription {
@ -4570,7 +4554,8 @@ type Subscription {
#It is used to listen cluster operation request from the graphql server #It is used to listen cluster operation request from the graphql server
clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction! clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction!
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse! @authorized getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse!
@authorized
} }
`, BuiltIn: false}, `, BuiltIn: false},
&ast.Source{Name: "graph/usermanagement.graphqls", Input: `type User { &ast.Source{Name: "graph/usermanagement.graphqls", Input: `type User {
@ -4633,7 +4618,7 @@ enum WorkflowRunSortingField {
Time Time
} }
input SortInput { input WorkflowRunSortInput {
field: WorkflowRunSortingField! field: WorkflowRunSortingField!
descending: Boolean descending: Boolean
} }
@ -4642,7 +4627,7 @@ input GetWorkflowRunsInput {
project_id: ID! project_id: ID!
workflow_run_ids: [ID] workflow_run_ids: [ID]
pagination: Pagination pagination: Pagination
sort: SortInput sort: WorkflowRunSortInput
filter: WorkflowRunFilterInput filter: WorkflowRunFilterInput
} }
@ -4667,6 +4652,51 @@ type GetWorkflowsOutput {
total_no_of_workflow_runs: Int! total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]! workflow_runs: [WorkflowRun]!
} }
input WorkflowFilterInput {
workflow_name: String
cluster_name: String
}
input ListWorkflowsInput {
project_id: ID!
workflow_ids: [ID]
pagination: Pagination
sort: WorkflowSortInput
filter: WorkflowFilterInput
}
enum WorkflowSortingField {
Name
}
input WorkflowSortInput {
field: WorkflowSortingField!
descending: Boolean
}
type Workflow {
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type ListWorkflowsOutput {
total_no_of_workflows: Int!
workflows: [Workflow]!
}
`, BuiltIn: false}, `, BuiltIn: false},
} }
var parsedSchema = gqlparser.MustLoadSchema(sources...) var parsedSchema = gqlparser.MustLoadSchema(sources...)
@ -5524,22 +5554,14 @@ func (ec *executionContext) field_Query_ListManifestTemplate_args(ctx context.Co
func (ec *executionContext) field_Query_ListWorkflow_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { func (ec *executionContext) field_Query_ListWorkflow_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error var err error
args := map[string]interface{}{} args := map[string]interface{}{}
var arg0 string var arg0 model.ListWorkflowsInput
if tmp, ok := rawArgs["project_id"]; ok { if tmp, ok := rawArgs["workflowInput"]; ok {
arg0, err = ec.unmarshalNString2string(ctx, tmp) arg0, err = ec.unmarshalNListWorkflowsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsInput(ctx, tmp)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} }
args["project_id"] = arg0 args["workflowInput"] = arg0
var arg1 []*string
if tmp, ok := rawArgs["workflow_ids"]; ok {
arg1, err = ec.unmarshalOID2ᚕᚖstring(ctx, tmp)
if err != nil {
return nil, err
}
}
args["workflow_ids"] = arg1
return args, nil return args, nil
} }
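The regenerated argument parser now unmarshals one required workflowInput object in place of the old project_id / workflow_ids pair, and the generated call site reads it back out of the args map with a plain type assertion. A minimal sketch of that pattern, assuming the same model package import path used elsewhere in this file and an illustrative project ID:

package main

import (
	"fmt"

	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func main() {
	// Shape of the args map produced by field_Query_ListWorkflow_args
	// (the project ID here is illustrative only).
	args := map[string]interface{}{
		"workflowInput": model.ListWorkflowsInput{ProjectID: "example-project"},
	}

	// The generated resolver call site asserts the same concrete type.
	input := args["workflowInput"].(model.ListWorkflowsInput)
	fmt.Println(input.ProjectID)
}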
@ -5657,20 +5679,6 @@ func (ec *executionContext) field_Query_getProject_args(ctx context.Context, raw
return args, nil return args, nil
} }
func (ec *executionContext) field_Query_getScheduledWorkflows_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 string
if tmp, ok := rawArgs["project_id"]; ok {
arg0, err = ec.unmarshalNString2string(ctx, tmp)
if err != nil {
return nil, err
}
}
args["project_id"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_getUser_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { func (ec *executionContext) field_Query_getUser_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error var err error
args := map[string]interface{}{} args := map[string]interface{}{}
@ -8811,6 +8819,74 @@ func (ec *executionContext) _Link_Url(ctx context.Context, field graphql.Collect
return ec.marshalNString2string(ctx, field.Selections, res) return ec.marshalNString2string(ctx, field.Selections, res)
} }
func (ec *executionContext) _ListWorkflowsOutput_total_no_of_workflows(ctx context.Context, field graphql.CollectedField, obj *model.ListWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "ListWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.TotalNoOfWorkflows, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _ListWorkflowsOutput_workflows(ctx context.Context, field graphql.CollectedField, obj *model.ListWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "ListWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Workflows, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.Workflow)
fc.Result = res
return ec.marshalNWorkflow2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflow(ctx, field.Selections, res)
}
func (ec *executionContext) _Maintainer_Name(ctx context.Context, field graphql.CollectedField, obj *model.Maintainer) (ret graphql.Marshaler) { func (ec *executionContext) _Maintainer_Name(ctx context.Context, field graphql.CollectedField, obj *model.Maintainer) (ret graphql.Marshaler) {
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
@ -13833,67 +13909,6 @@ func (ec *executionContext) _Query_users(ctx context.Context, field graphql.Coll
return ec.marshalNUser2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐUserᚄ(ctx, field.Selections, res) return ec.marshalNUser2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐUserᚄ(ctx, field.Selections, res)
} }
func (ec *executionContext) _Query_getScheduledWorkflows(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_getScheduledWorkflows_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
directive0 := func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().GetScheduledWorkflows(rctx, args["project_id"].(string))
}
directive1 := func(ctx context.Context) (interface{}, error) {
if ec.directives.Authorized == nil {
return nil, errors.New("directive authorized is not implemented")
}
return ec.directives.Authorized(ctx, nil, directive0)
}
tmp, err := directive1(rctx)
if err != nil {
return nil, err
}
if tmp == nil {
return nil, nil
}
if data, ok := tmp.([]*model.ScheduledWorkflows); ok {
return data, nil
}
return nil, fmt.Errorf(`unexpected type %T from directive, should be []*github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.ScheduledWorkflows`, tmp)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.ScheduledWorkflows)
fc.Result = res
return ec.marshalNScheduledWorkflows2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
@ -13919,7 +13934,7 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
directive0 := func(rctx context.Context) (interface{}, error) { directive0 := func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().ListWorkflow(rctx, args["project_id"].(string), args["workflow_ids"].([]*string)) return ec.resolvers.Query().ListWorkflow(rctx, args["workflowInput"].(model.ListWorkflowsInput))
} }
directive1 := func(ctx context.Context) (interface{}, error) { directive1 := func(ctx context.Context) (interface{}, error) {
if ec.directives.Authorized == nil { if ec.directives.Authorized == nil {
@ -13935,10 +13950,10 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
if tmp == nil { if tmp == nil {
return nil, nil return nil, nil
} }
if data, ok := tmp.([]*model.Workflow); ok { if data, ok := tmp.(*model.ListWorkflowsOutput); ok {
return data, nil return data, nil
} }
return nil, fmt.Errorf(`unexpected type %T from directive, should be []*github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.Workflow`, tmp) return nil, fmt.Errorf(`unexpected type %T from directive, should be *github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.ListWorkflowsOutput`, tmp)
}) })
if err != nil { if err != nil {
ec.Error(ctx, err) ec.Error(ctx, err)
@ -13950,9 +13965,9 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
} }
return graphql.Null return graphql.Null
} }
res := resTmp.([]*model.Workflow) res := resTmp.(*model.ListWorkflowsOutput)
fc.Result = res fc.Result = res
return ec.marshalNWorkflow2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflow(ctx, field.Selections, res) return ec.marshalNListWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx, field.Selections, res)
} }
func (ec *executionContext) _Query_getCharts(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { func (ec *executionContext) _Query_getCharts(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
@ -16642,40 +16657,6 @@ func (ec *executionContext) _User_removed_at(ctx context.Context, field graphql.
return ec.marshalNString2string(ctx, field.Selections, res) return ec.marshalNString2string(ctx, field.Selections, res)
} }
func (ec *executionContext) _Workflow_workflow_type(ctx context.Context, field graphql.CollectedField, obj *model.Workflow) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Workflow",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.WorkflowType, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Workflow_workflow_id(ctx context.Context, field graphql.CollectedField, obj *model.Workflow) (ret graphql.Marshaler) { func (ec *executionContext) _Workflow_workflow_id(ctx context.Context, field graphql.CollectedField, obj *model.Workflow) (ret graphql.Marshaler) {
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
@ -21484,7 +21465,7 @@ func (ec *executionContext) unmarshalInputGetWorkflowRunsInput(ctx context.Conte
} }
case "sort": case "sort":
var err error var err error
it.Sort, err = ec.unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v) it.Sort, err = ec.unmarshalOWorkflowRunSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx, v)
if err != nil { if err != nil {
return it, err return it, err
} }
@ -21650,6 +21631,48 @@ func (ec *executionContext) unmarshalInputKubeObjectRequest(ctx context.Context,
return it, nil return it, nil
} }
func (ec *executionContext) unmarshalInputListWorkflowsInput(ctx context.Context, obj interface{}) (model.ListWorkflowsInput, error) {
var it model.ListWorkflowsInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "project_id":
var err error
it.ProjectID, err = ec.unmarshalNID2string(ctx, v)
if err != nil {
return it, err
}
case "workflow_ids":
var err error
it.WorkflowIds, err = ec.unmarshalOID2ᚕᚖstring(ctx, v)
if err != nil {
return it, err
}
case "pagination":
var err error
it.Pagination, err = ec.unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx, v)
if err != nil {
return it, err
}
case "sort":
var err error
it.Sort, err = ec.unmarshalOWorkflowSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx, v)
if err != nil {
return it, err
}
case "filter":
var err error
it.Filter, err = ec.unmarshalOWorkflowFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputMemberInput(ctx context.Context, obj interface{}) (model.MemberInput, error) { func (ec *executionContext) unmarshalInputMemberInput(ctx context.Context, obj interface{}) (model.MemberInput, error) {
var it model.MemberInput var it model.MemberInput
var asMap = obj.(map[string]interface{}) var asMap = obj.(map[string]interface{})
@ -21812,30 +21835,6 @@ func (ec *executionContext) unmarshalInputPodLogRequest(ctx context.Context, obj
return it, nil return it, nil
} }
func (ec *executionContext) unmarshalInputSortInput(ctx context.Context, obj interface{}) (model.SortInput, error) {
var it model.SortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputTemplateInput(ctx context.Context, obj interface{}) (model.TemplateInput, error) { func (ec *executionContext) unmarshalInputTemplateInput(ctx context.Context, obj interface{}) (model.TemplateInput, error) {
var it model.TemplateInput var it model.TemplateInput
var asMap = obj.(map[string]interface{}) var asMap = obj.(map[string]interface{})
@ -22016,6 +22015,30 @@ func (ec *executionContext) unmarshalInputWeightagesInput(ctx context.Context, o
return it, nil return it, nil
} }
func (ec *executionContext) unmarshalInputWorkflowFilterInput(ctx context.Context, obj interface{}) (model.WorkflowFilterInput, error) {
var it model.WorkflowFilterInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "workflow_name":
var err error
it.WorkflowName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "cluster_name":
var err error
it.ClusterName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowRunFilterInput(ctx context.Context, obj interface{}) (model.WorkflowRunFilterInput, error) { func (ec *executionContext) unmarshalInputWorkflowRunFilterInput(ctx context.Context, obj interface{}) (model.WorkflowRunFilterInput, error) {
var it model.WorkflowRunFilterInput var it model.WorkflowRunFilterInput
var asMap = obj.(map[string]interface{}) var asMap = obj.(map[string]interface{})
@ -22106,6 +22129,54 @@ func (ec *executionContext) unmarshalInputWorkflowRunInput(ctx context.Context,
return it, nil return it, nil
} }
func (ec *executionContext) unmarshalInputWorkflowRunSortInput(ctx context.Context, obj interface{}) (model.WorkflowRunSortInput, error) {
var it model.WorkflowRunSortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowSortInput(ctx context.Context, obj interface{}) (model.WorkflowSortInput, error) {
var it model.WorkflowSortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputcreateDBInput(ctx context.Context, obj interface{}) (model.CreateDBInput, error) { func (ec *executionContext) unmarshalInputcreateDBInput(ctx context.Context, obj interface{}) (model.CreateDBInput, error) {
var it model.CreateDBInput var it model.CreateDBInput
var asMap = obj.(map[string]interface{}) var asMap = obj.(map[string]interface{})
@ -23341,6 +23412,38 @@ func (ec *executionContext) _Link(ctx context.Context, sel ast.SelectionSet, obj
return out return out
} }
var listWorkflowsOutputImplementors = []string{"ListWorkflowsOutput"}
func (ec *executionContext) _ListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, obj *model.ListWorkflowsOutput) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, listWorkflowsOutputImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("ListWorkflowsOutput")
case "total_no_of_workflows":
out.Values[i] = ec._ListWorkflowsOutput_total_no_of_workflows(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "workflows":
out.Values[i] = ec._ListWorkflowsOutput_workflows(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
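In the generated marshaler above, both total_no_of_workflows and workflows are non-nullable, so a null in either field invalidates the whole ListWorkflowsOutput. This is presumably why the handler returns a zero-count output with an empty, non-nil slice when nothing matches. A minimal sketch of such an empty-but-valid result (not the project's handler, only the model package):

package main

import (
	"fmt"

	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func main() {
	// Both fields are set, so neither generated field marshaler
	// reports "must not be null".
	empty := &model.ListWorkflowsOutput{
		TotalNoOfWorkflows: 0,
		Workflows:          []*model.Workflow{},
	}
	fmt.Println(empty.TotalNoOfWorkflows, len(empty.Workflows))
}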
var maintainerImplementors = []string{"Maintainer"} var maintainerImplementors = []string{"Maintainer"}
func (ec *executionContext) _Maintainer(ctx context.Context, sel ast.SelectionSet, obj *model.Maintainer) graphql.Marshaler { func (ec *executionContext) _Maintainer(ctx context.Context, sel ast.SelectionSet, obj *model.Maintainer) graphql.Marshaler {
@ -24190,20 +24293,6 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
} }
return res return res
}) })
case "getScheduledWorkflows":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Query_getScheduledWorkflows(ctx, field)
if res == graphql.Null {
atomic.AddUint32(&invalids, 1)
}
return res
})
case "ListWorkflow": case "ListWorkflow":
field := field field := field
out.Concurrently(i, func() (res graphql.Marshaler) { out.Concurrently(i, func() (res graphql.Marshaler) {
@ -24762,11 +24851,6 @@ func (ec *executionContext) _Workflow(ctx context.Context, sel ast.SelectionSet,
switch field.Name { switch field.Name {
case "__typename": case "__typename":
out.Values[i] = graphql.MarshalString("Workflow") out.Values[i] = graphql.MarshalString("Workflow")
case "workflow_type":
out.Values[i] = ec._Workflow_workflow_type(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "workflow_id": case "workflow_id":
out.Values[i] = ec._Workflow_workflow_id(ctx, field, obj) out.Values[i] = ec._Workflow_workflow_id(ctx, field, obj)
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
@ -26305,6 +26389,24 @@ func (ec *executionContext) marshalNLink2ᚖgithubᚗcomᚋlitmuschaosᚋlitmus
return ec._Link(ctx, sel, v) return ec._Link(ctx, sel, v)
} }
func (ec *executionContext) unmarshalNListWorkflowsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsInput(ctx context.Context, v interface{}) (model.ListWorkflowsInput, error) {
return ec.unmarshalInputListWorkflowsInput(ctx, v)
}
func (ec *executionContext) marshalNListWorkflowsOutput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v model.ListWorkflowsOutput) graphql.Marshaler {
return ec._ListWorkflowsOutput(ctx, sel, &v)
}
func (ec *executionContext) marshalNListWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v *model.ListWorkflowsOutput) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._ListWorkflowsOutput(ctx, sel, v)
}
func (ec *executionContext) marshalNMaintainer2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐMaintainer(ctx context.Context, sel ast.SelectionSet, v model.Maintainer) graphql.Marshaler { func (ec *executionContext) marshalNMaintainer2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐMaintainer(ctx context.Context, sel ast.SelectionSet, v model.Maintainer) graphql.Marshaler {
return ec._Maintainer(ctx, sel, &v) return ec._Maintainer(ctx, sel, &v)
} }
@ -26688,43 +26790,6 @@ func (ec *executionContext) marshalNSSHKey2ᚖgithubᚗcomᚋlitmuschaosᚋlitmu
return ec._SSHKey(ctx, sel, v) return ec._SSHKey(ctx, sel, v)
} }
func (ec *executionContext) marshalNScheduledWorkflows2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v []*model.ScheduledWorkflows) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalOScheduledWorkflows2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
return ret
}
func (ec *executionContext) marshalNSpec2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSpec(ctx context.Context, sel ast.SelectionSet, v model.Spec) graphql.Marshaler { func (ec *executionContext) marshalNSpec2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSpec(ctx context.Context, sel ast.SelectionSet, v model.Spec) graphql.Marshaler {
return ec._Spec(ctx, sel, &v) return ec._Spec(ctx, sel, &v)
} }
@ -26974,6 +27039,15 @@ func (ec *executionContext) marshalNWorkflowRunSortingField2githubᚗcomᚋlitmu
return v return v
} }
func (ec *executionContext) unmarshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx context.Context, v interface{}) (model.WorkflowSortingField, error) {
var res model.WorkflowSortingField
return res, res.UnmarshalGQL(v)
}
func (ec *executionContext) marshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx context.Context, sel ast.SelectionSet, v model.WorkflowSortingField) graphql.Marshaler {
return v
}
func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler { func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler {
return ec.___Directive(ctx, sel, &v) return ec.___Directive(ctx, sel, &v)
} }
@ -27708,29 +27782,6 @@ func (ec *executionContext) unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaos
return &res, err return &res, err
} }
func (ec *executionContext) marshalOScheduledWorkflows2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v model.ScheduledWorkflows) graphql.Marshaler {
return ec._ScheduledWorkflows(ctx, sel, &v)
}
func (ec *executionContext) marshalOScheduledWorkflows2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v *model.ScheduledWorkflows) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec._ScheduledWorkflows(ctx, sel, v)
}
func (ec *executionContext) unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (model.SortInput, error) {
return ec.unmarshalInputSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (*model.SortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) { func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) {
return graphql.UnmarshalString(v) return graphql.UnmarshalString(v)
} }
@ -27809,6 +27860,18 @@ func (ec *executionContext) marshalOWorkflow2ᚖgithubᚗcomᚋlitmuschaosᚋlit
return ec._Workflow(ctx, sel, v) return ec._Workflow(ctx, sel, v)
} }
func (ec *executionContext) unmarshalOWorkflowFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx context.Context, v interface{}) (model.WorkflowFilterInput, error) {
return ec.unmarshalInputWorkflowFilterInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx context.Context, v interface{}) (*model.WorkflowFilterInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx, v)
return &res, err
}
func (ec *executionContext) marshalOWorkflowRun2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRun) graphql.Marshaler { func (ec *executionContext) marshalOWorkflowRun2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRun) graphql.Marshaler {
return ec._WorkflowRun(ctx, sel, &v) return ec._WorkflowRun(ctx, sel, &v)
} }
@ -27832,6 +27895,18 @@ func (ec *executionContext) unmarshalOWorkflowRunFilterInput2ᚖgithubᚗcomᚋl
return &res, err return &res, err
} }
func (ec *executionContext) unmarshalOWorkflowRunSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx context.Context, v interface{}) (model.WorkflowRunSortInput, error) {
return ec.unmarshalInputWorkflowRunSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowRunSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx context.Context, v interface{}) (*model.WorkflowRunSortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowRunSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, v interface{}) (model.WorkflowRunStatus, error) { func (ec *executionContext) unmarshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, v interface{}) (model.WorkflowRunStatus, error) {
var res model.WorkflowRunStatus var res model.WorkflowRunStatus
return res, res.UnmarshalGQL(v) return res, res.UnmarshalGQL(v)
@ -27907,6 +27982,18 @@ func (ec *executionContext) marshalOWorkflowRuns2ᚖgithubᚗcomᚋlitmuschaos
return ec._WorkflowRuns(ctx, sel, v) return ec._WorkflowRuns(ctx, sel, v)
} }
func (ec *executionContext) unmarshalOWorkflowSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx context.Context, v interface{}) (model.WorkflowSortInput, error) {
return ec.unmarshalInputWorkflowSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx context.Context, v interface{}) (*model.WorkflowSortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler {
if v == nil { if v == nil {
return graphql.Null return graphql.Null

View File

@ -220,7 +220,7 @@ type GetWorkflowRunsInput struct {
ProjectID string `json:"project_id"` ProjectID string `json:"project_id"`
WorkflowRunIds []*string `json:"workflow_run_ids"` WorkflowRunIds []*string `json:"workflow_run_ids"`
Pagination *Pagination `json:"pagination"` Pagination *Pagination `json:"pagination"`
Sort *SortInput `json:"sort"` Sort *WorkflowRunSortInput `json:"sort"`
Filter *WorkflowRunFilterInput `json:"filter"` Filter *WorkflowRunFilterInput `json:"filter"`
} }
@ -289,6 +289,19 @@ type Link struct {
URL string `json:"Url"` URL string `json:"Url"`
} }
type ListWorkflowsInput struct {
ProjectID string `json:"project_id"`
WorkflowIds []*string `json:"workflow_ids"`
Pagination *Pagination `json:"pagination"`
Sort *WorkflowSortInput `json:"sort"`
Filter *WorkflowFilterInput `json:"filter"`
}
type ListWorkflowsOutput struct {
TotalNoOfWorkflows int `json:"total_no_of_workflows"`
Workflows []*Workflow `json:"workflows"`
}
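Together these two structs carry everything the consolidated query needs: the old project_id / workflow_ids arguments plus optional pagination, sorting, and filtering, with the total count travelling alongside the page of results. A minimal sketch of building an input on the Go side, assuming the existing Pagination model exposes Page and Limit as used in the handler; all field values are illustrative:

package main

import (
	"fmt"

	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func main() {
	name := "node-cpu-hog" // illustrative workflow-name filter
	descending := true

	input := model.ListWorkflowsInput{
		ProjectID:  "example-project",
		Pagination: &model.Pagination{Page: 0, Limit: 15},
		Sort: &model.WorkflowSortInput{
			Field:      model.WorkflowSortingFieldName,
			Descending: &descending,
		},
		Filter: &model.WorkflowFilterInput{WorkflowName: &name},
	}

	// A handler such as QueryListWorkflow answers with a ListWorkflowsOutput
	// carrying the total count plus the requested page of workflows.
	fmt.Printf("%+v\n", input)
}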
type Maintainer struct { type Maintainer struct {
Name string `json:"Name"` Name string `json:"Name"`
Email string `json:"Email"` Email string `json:"Email"`
@ -438,11 +451,6 @@ type ScheduledWorkflows struct {
IsRemoved bool `json:"isRemoved"` IsRemoved bool `json:"isRemoved"`
} }
type SortInput struct {
Field WorkflowRunSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type Spec struct { type Spec struct {
DisplayName string `json:"DisplayName"` DisplayName string `json:"DisplayName"`
CategoryDescription string `json:"CategoryDescription"` CategoryDescription string `json:"CategoryDescription"`
@ -508,7 +516,6 @@ type WeightagesInput struct {
} }
type Workflow struct { type Workflow struct {
WorkflowType string `json:"workflow_type"`
WorkflowID string `json:"workflow_id"` WorkflowID string `json:"workflow_id"`
WorkflowManifest string `json:"workflow_manifest"` WorkflowManifest string `json:"workflow_manifest"`
CronSyntax string `json:"cronSyntax"` CronSyntax string `json:"cronSyntax"`
@ -526,6 +533,11 @@ type Workflow struct {
WorkflowRuns []*WorkflowRuns `json:"workflow_runs"` WorkflowRuns []*WorkflowRuns `json:"workflow_runs"`
} }
type WorkflowFilterInput struct {
WorkflowName *string `json:"workflow_name"`
ClusterName *string `json:"cluster_name"`
}
type WorkflowRun struct { type WorkflowRun struct {
WorkflowRunID string `json:"workflow_run_id"` WorkflowRunID string `json:"workflow_run_id"`
WorkflowID string `json:"workflow_id"` WorkflowID string `json:"workflow_id"`
@ -560,12 +572,22 @@ type WorkflowRunInput struct {
IsRemoved *bool `json:"isRemoved"` IsRemoved *bool `json:"isRemoved"`
} }
type WorkflowRunSortInput struct {
Field WorkflowRunSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type WorkflowRuns struct { type WorkflowRuns struct {
ExecutionData string `json:"execution_data"` ExecutionData string `json:"execution_data"`
WorkflowRunID string `json:"workflow_run_id"` WorkflowRunID string `json:"workflow_run_id"`
LastUpdated string `json:"last_updated"` LastUpdated string `json:"last_updated"`
} }
type WorkflowSortInput struct {
Field WorkflowSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type AnnotationsPromResponse struct { type AnnotationsPromResponse struct {
Queryid string `json:"queryid"` Queryid string `json:"queryid"`
Legends []*string `json:"legends"` Legends []*string `json:"legends"`
@ -955,3 +977,42 @@ func (e *WorkflowRunStatus) UnmarshalGQL(v interface{}) error {
func (e WorkflowRunStatus) MarshalGQL(w io.Writer) { func (e WorkflowRunStatus) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String())) fmt.Fprint(w, strconv.Quote(e.String()))
} }
type WorkflowSortingField string
const (
WorkflowSortingFieldName WorkflowSortingField = "Name"
)
var AllWorkflowSortingField = []WorkflowSortingField{
WorkflowSortingFieldName,
}
func (e WorkflowSortingField) IsValid() bool {
switch e {
case WorkflowSortingFieldName:
return true
}
return false
}
func (e WorkflowSortingField) String() string {
return string(e)
}
func (e *WorkflowSortingField) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = WorkflowSortingField(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid WorkflowSortingField", str)
}
return nil
}
func (e WorkflowSortingField) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
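The generated enum only accepts values declared in the schema, so anything other than "Name" is rejected while unmarshalling; the "Time" string below is just an arbitrary invalid example. A small standalone sketch of that behaviour using only the generated model package:

package main

import (
	"fmt"

	"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
)

func main() {
	var field model.WorkflowSortingField

	// "Name" is the only declared WorkflowSortingField value.
	fmt.Println(field.UnmarshalGQL("Name"), field.IsValid()) // <nil> true

	// Any other string fails validation.
	err := field.UnmarshalGQL("Time")
	fmt.Println(err) // Time is not a valid WorkflowSortingField
}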

View File

@ -152,7 +152,7 @@ input PodLogRequest {
} }
type ScheduledWorkflows { type ScheduledWorkflows {
workflow_type:String! workflow_type: String!
workflow_id: String! workflow_id: String!
workflow_manifest: String! workflow_manifest: String!
cronSyntax: String! cronSyntax: String!
@ -169,25 +169,6 @@ type ScheduledWorkflows {
isRemoved: Boolean! isRemoved: Boolean!
} }
type Workflow {
workflow_type:String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type WorkflowRuns { type WorkflowRuns {
execution_data: String! execution_data: String!
workflow_run_id: ID! workflow_run_id: ID!
@ -287,10 +268,8 @@ type Query {
users: [User!]! @authorized users: [User!]! @authorized
# [Deprecated soon] ListWorkflow(workflowInput: ListWorkflowsInput!): ListWorkflowsOutput!
getScheduledWorkflows(project_id: String!): [ScheduledWorkflows]! @authorized @authorized
ListWorkflow(project_id: String!, workflow_ids: [ID]): [Workflow]! @authorized
getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized
@ -308,7 +287,8 @@ type Query {
GetPromQuery(query: promInput): promResponse! @authorized GetPromQuery(query: promInput): promResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse! @authorized GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse!
@authorized
GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized
@ -344,13 +324,16 @@ type Mutation {
## Workflow APIs ## Workflow APIs
# It is used to create chaosworkflow # It is used to create chaosworkflow
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse!
@authorized
reRunChaosWorkFlow(workflowID: String!): String! @authorized reRunChaosWorkFlow(workflowID: String!): String! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean! @authorized deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean!
@authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean! @authorized syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean!
@authorized
#Used for sending invitation #Used for sending invitation
sendInvitation(member: MemberInput!): Member @authorized sendInvitation(member: MemberInput!): Member @authorized
@ -368,7 +351,8 @@ type Mutation {
leaveProject(member: MemberInput!): String! @authorized leaveProject(member: MemberInput!): String! @authorized
#Used to update project name #Used to update project name
updateProjectName(projectID: String!, projectName: String!): String! @authorized updateProjectName(projectID: String!, projectName: String!): String!
@authorized
#It is used to confirm the subscriber registration #It is used to confirm the subscriber registration
clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse! clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse!
@ -388,7 +372,8 @@ type Mutation {
syncHub(id: ID!): [MyHubStatus!]! @authorized syncHub(id: ID!): [MyHubStatus!]! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse! @authorized updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse!
@authorized
deleteClusterReg(cluster_id: String!): String! @authorized deleteClusterReg(cluster_id: String!): String! @authorized
@ -423,7 +408,8 @@ type Mutation {
deleteDataSource(input: deleteDSInput!): Boolean! @authorized deleteDataSource(input: deleteDSInput!): Boolean! @authorized
# Manifest Template # Manifest Template
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate! @authorized createManifestTemplate(templateInput: TemplateInput): ManifestTemplate!
@authorized
deleteManifestTemplate(template_id: String!): Boolean! @authorized deleteManifestTemplate(template_id: String!): Boolean! @authorized
@ -439,7 +425,8 @@ type Mutation {
imageRegistryInfo: imageRegistryInput! imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized ): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized deleteImageRegistry(image_registry_id: String!, project_id: String!): String!
@authorized
} }
type Subscription { type Subscription {
@ -453,5 +440,6 @@ type Subscription {
#It is used to listen cluster operation request from the graphql server #It is used to listen cluster operation request from the graphql server
clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction! clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction!
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse! @authorized getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse!
@authorized
} }

View File

@ -337,20 +337,12 @@ func (r *queryResolver) Users(ctx context.Context) ([]*model.User, error) {
return usermanagement.GetUsers(ctx) return usermanagement.GetUsers(ctx)
} }
func (r *queryResolver) GetScheduledWorkflows(ctx context.Context, projectID string) ([]*model.ScheduledWorkflows, error) { func (r *queryResolver) ListWorkflow(ctx context.Context, workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error) {
err := validate.ValidateRole(ctx, projectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation) err := validate.ValidateRole(ctx, workflowInput.ProjectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return wfHandler.QueryWorkflows(projectID) return wfHandler.QueryListWorkflow(workflowInput)
}
func (r *queryResolver) ListWorkflow(ctx context.Context, projectID string, workflowIds []*string) ([]*model.Workflow, error) {
err := validate.ValidateRole(ctx, projectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
if err != nil {
return nil, err
}
return wfHandler.QueryListWorkflow(projectID, workflowIds)
} }
func (r *queryResolver) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) { func (r *queryResolver) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) {

View File

@ -27,7 +27,7 @@ enum WorkflowRunSortingField {
Time Time
} }
input SortInput { input WorkflowRunSortInput {
field: WorkflowRunSortingField! field: WorkflowRunSortingField!
descending: Boolean descending: Boolean
} }
@ -36,7 +36,7 @@ input GetWorkflowRunsInput {
project_id: ID! project_id: ID!
workflow_run_ids: [ID] workflow_run_ids: [ID]
pagination: Pagination pagination: Pagination
sort: SortInput sort: WorkflowRunSortInput
filter: WorkflowRunFilterInput filter: WorkflowRunFilterInput
} }
@ -61,3 +61,48 @@ type GetWorkflowsOutput {
total_no_of_workflow_runs: Int! total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]! workflow_runs: [WorkflowRun]!
} }
input WorkflowFilterInput {
workflow_name: String
cluster_name: String
}
input ListWorkflowsInput {
project_id: ID!
workflow_ids: [ID]
pagination: Pagination
sort: WorkflowSortInput
filter: WorkflowFilterInput
}
enum WorkflowSortingField {
Name
}
input WorkflowSortInput {
field: WorkflowSortingField!
descending: Boolean
}
type Workflow {
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type ListWorkflowsOutput {
total_no_of_workflows: Int!
workflows: [Workflow]!
}
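On the wire the input uses the snake_case names declared above rather than the Go field names, so a client sends a single workflowInput variable. A sketch of plausible variables for the first page of a name-filtered, name-sorted listing; the values are illustrative, and the page/limit keys assume the pre-existing Pagination input exposes those two fields:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Illustrative "workflowInput" variable for the ListWorkflow query;
	// keys follow the schema's snake_case field names.
	workflowInput := map[string]interface{}{
		"project_id": "example-project",
		"pagination": map[string]int{"page": 0, "limit": 15},
		"sort":       map[string]interface{}{"field": "Name", "descending": false},
		"filter":     map[string]string{"workflow_name": "node-cpu-hog"},
	}

	payload, _ := json.MarshalIndent(workflowInput, "", "  ")
	fmt.Println(string(payload))
}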

View File

@ -10,13 +10,12 @@ import (
"strings" "strings"
"time" "time"
"go.mongodb.org/mongo-driver/mongo"
"github.com/tidwall/gjson" "github.com/tidwall/gjson"
"github.com/tidwall/sjson" "github.com/tidwall/sjson"
"github.com/jinzhu/copier" "github.com/jinzhu/copier"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model" "github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
@ -144,12 +143,12 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
var pipeline mongo.Pipeline var pipeline mongo.Pipeline
// Match with projectID // Match with projectID
matchStage := bson.D{ matchProjectIdStage := bson.D{
{"$match", bson.D{ {"$match", bson.D{
{"project_id", input.ProjectID}, {"project_id", input.ProjectID},
}}, }},
} }
pipeline = append(pipeline, matchStage) pipeline = append(pipeline, matchProjectIdStage)
includeAllFromWorkflow := bson.D{ includeAllFromWorkflow := bson.D{
{"workflow_id", 1}, {"workflow_id", 1},
@ -169,7 +168,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
} }
// Filter the available workflows where isRemoved is false // Filter the available workflows where isRemoved is false
matchWfRemovedStage := bson.D{ matchWfRunIsRemovedStage := bson.D{
{"$project", append(includeAllFromWorkflow, {"$project", append(includeAllFromWorkflow,
bson.E{Key: "workflow_runs", Value: bson.D{ bson.E{Key: "workflow_runs", Value: bson.D{
{"$filter", bson.D{ {"$filter", bson.D{
@ -182,7 +181,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
}}, }},
)}, )},
} }
pipeline = append(pipeline, matchWfRemovedStage) pipeline = append(pipeline, matchWfRunIsRemovedStage)
// Match the pipelineIds from the input array // Match the pipelineIds from the input array
if len(input.WorkflowRunIds) != 0 { if len(input.WorkflowRunIds) != 0 {
@ -358,7 +357,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
var workflows []dbSchemaWorkflow.AggregatedWorkflowRuns var workflows []dbSchemaWorkflow.AggregatedWorkflowRuns
if err = workflowsCursor.All(context.Background(), &workflows); err != nil { if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 {
fmt.Println(err) fmt.Println(err)
return &model.GetWorkflowsOutput{ return &model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: 0, TotalNoOfWorkflowRuns: 0,
@ -388,90 +387,163 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
result = append(result, &newWorkflowRun) result = append(result, &newWorkflowRun)
} }
totalFilteredWorkflowRuns := 0 totalFilteredWorkflowRunsCounter := 0
if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflowRuns) > 0 { if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflowRuns) > 0 {
totalFilteredWorkflowRuns = workflows[0].TotalFilteredWorkflowRuns[0].Count totalFilteredWorkflowRunsCounter = workflows[0].TotalFilteredWorkflowRuns[0].Count
} }
output := model.GetWorkflowsOutput{ output := model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: totalFilteredWorkflowRuns, TotalNoOfWorkflowRuns: totalFilteredWorkflowRunsCounter,
WorkflowRuns: result, WorkflowRuns: result,
} }
return &output, nil return &output, nil
} }
-// Deprecated
-func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
-    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
-    if err != nil {
-        return nil, err
-    }
-
-    result := []*model.ScheduledWorkflows{}
-    for _, workflow := range chaosWorkflows {
-        cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
-        if err != nil {
-            return nil, err
-        }
-
-        if workflow.IsRemoved == false {
-            var Weightages []*model.Weightages
-            copier.Copy(&Weightages, &workflow.Weightages)
-
-            newChaosWorkflows := model.ScheduledWorkflows{
-                WorkflowType:        string(workflow.WorkflowType),
-                WorkflowID:          workflow.WorkflowID,
-                WorkflowManifest:    workflow.WorkflowManifest,
-                WorkflowName:        workflow.WorkflowName,
-                CronSyntax:          workflow.CronSyntax,
-                WorkflowDescription: workflow.WorkflowDescription,
-                Weightages:          Weightages,
-                IsCustomWorkflow:    workflow.IsCustomWorkflow,
-                UpdatedAt:           workflow.UpdatedAt,
-                CreatedAt:           workflow.CreatedAt,
-                ProjectID:           workflow.ProjectID,
-                IsRemoved:           workflow.IsRemoved,
-                ClusterName:         cluster.ClusterName,
-                ClusterID:           cluster.ClusterID,
-                ClusterType:         cluster.ClusterType,
-            }
-            result = append(result, &newChaosWorkflows)
-        }
-    }
-    return result, nil
-}
    // QueryListWorkflow returns all the workflows present in the given project
-func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workflow, error) {
-    var query bson.D
-    if len(workflowIds) != 0 {
-        query = bson.D{
-            {"project_id", project_id},
-            {"workflow_id", bson.M{"$in": workflowIds}},
-        }
-    } else {
-        query = bson.D{
-            {"project_id", project_id},
-        }
+func QueryListWorkflow(workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error) {
+    var pipeline mongo.Pipeline
+
+    // Match with projectID
+    matchProjectIdStage := bson.D{
+        {"$match", bson.D{
+            {"project_id", workflowInput.ProjectID},
+        }},
+    }
+    pipeline = append(pipeline, matchProjectIdStage)
+
+    // Match the pipelineIds from the input array
+    if len(workflowInput.WorkflowIds) != 0 {
+        matchWfIdStage := bson.D{
+            {"$match", bson.D{
+                {"workflow_id", bson.D{
+                    {"$in", workflowInput.WorkflowIds},
+                }},
+            }},
+        }
+        pipeline = append(pipeline, matchWfIdStage)
+    }
+
+    // Filtering out the workflows that are deleted/removed
+    matchWfIsRemovedStage := bson.D{
+        {"$match", bson.D{
+            {"isRemoved", bson.D{
+                {"$eq", false},
+            }},
+        }},
+    }
+    pipeline = append(pipeline, matchWfIsRemovedStage)
+
+    // Filtering based on multiple parameters
+    if workflowInput.Filter != nil {
+
+        // Filtering based on workflow name
+        if workflowInput.Filter.WorkflowName != nil && *workflowInput.Filter.WorkflowName != "" {
+            matchWfNameStage := bson.D{
+                {"$match", bson.D{
+                    {"workflow_name", bson.D{
+                        {"$regex", workflowInput.Filter.WorkflowName},
+                    }},
+                }},
+            }
+            pipeline = append(pipeline, matchWfNameStage)
+        }
+
+        // Filtering based on cluster name
+        if workflowInput.Filter.ClusterName != nil && *workflowInput.Filter.ClusterName != "All" && *workflowInput.Filter.ClusterName != "" {
+            matchClusterStage := bson.D{
+                {"$match", bson.D{
+                    {"cluster_name", workflowInput.Filter.ClusterName},
+                }},
+            }
+            pipeline = append(pipeline, matchClusterStage)
+        }
    }
-    chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(query)
+    var sortStage bson.D
+
+    switch {
+    case workflowInput.Sort != nil && workflowInput.Sort.Field == model.WorkflowSortingFieldName:
+        // Sorting based on WorkflowName time
+        if workflowInput.Sort.Descending != nil && *workflowInput.Sort.Descending {
+            sortStage = bson.D{
+                {"$sort", bson.D{
+                    {"workflow_name", -1},
+                }},
+            }
+        } else {
+            sortStage = bson.D{
+                {"$sort", bson.D{
+                    {"workflow_name", 1},
+                }},
+            }
+        }
+    default:
+        // Default sorting: sorts it by LastUpdated time in descending order
+        sortStage = bson.D{
+            {"$sort", bson.D{
+                {"updated_at", -1},
+            }},
+        }
+    }
+
+    // Pagination
+    paginatedWorkflows := bson.A{
+        sortStage,
+    }
+
+    if workflowInput.Pagination != nil {
+        paginationSkipStage := bson.D{
+            {"$skip", workflowInput.Pagination.Page * workflowInput.Pagination.Limit},
+        }
+        paginationLimitStage := bson.D{
+            {"$limit", workflowInput.Pagination.Limit},
+        }
+        paginatedWorkflows = append(paginatedWorkflows, paginationSkipStage, paginationLimitStage)
+    }
+
+    // Add two stages where we first count the number of filtered workflow and then paginate the results
+    facetStage := bson.D{
+        {"$facet", bson.D{
+            {"total_filtered_workflows", bson.A{
+                bson.D{{"$count", "count"}},
+            }},
+            {"scheduled_workflows", paginatedWorkflows},
+        }},
+    }
+    pipeline = append(pipeline, facetStage)
+
+    // Call aggregation on pipeline
+    workflowsCursor, err := dbOperationsWorkflow.GetAggregateWorkflows(pipeline)
    if err != nil {
        return nil, err
    }

    var result []*model.Workflow

-    for _, workflow := range chaosWorkflows {
+    var workflows []dbSchemaWorkflow.AggregatedWorkflows
+
+    if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 {
+        return &model.ListWorkflowsOutput{
+            TotalNoOfWorkflows: 0,
+            Workflows:          result,
+        }, nil
+    }
+
+    for _, workflow := range workflows[0].ScheduledWorkflows {
        cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
        if err != nil {
            return nil, err
        }

        var Weightages []*model.Weightages
        copier.Copy(&Weightages, &workflow.Weightages)

        var WorkflowRuns []*model.WorkflowRuns
        copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)

        newChaosWorkflows := model.Workflow{
-            WorkflowType:     string(workflow.WorkflowType),
            WorkflowID:       workflow.WorkflowID,
            WorkflowManifest: workflow.WorkflowManifest,
            WorkflowName:     workflow.WorkflowName,
@@ -490,7 +562,17 @@ func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workflow, error)
        }
        result = append(result, &newChaosWorkflows)
    }
-    return result, nil
+
+    totalFilteredWorkflowsCounter := 0
+    if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflows) > 0 {
+        totalFilteredWorkflowsCounter = workflows[0].TotalFilteredWorkflows[0].Count
+    }
+
+    output := model.ListWorkflowsOutput{
+        TotalNoOfWorkflows: totalFilteredWorkflowsCounter,
+        Workflows:          result,
+    }
+
+    return &output, nil
}
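For a rough end-to-end picture of what the rewritten QueryListWorkflow assembles, the standalone sketch below builds the same stage sequence for a hypothetical request and prints each stage as extended JSON. The stage contents are taken from the code above; the project ID, name filter, page number, and page size are invented, and the surrounding wiring (GetAggregateWorkflows, the gqlgen-generated input struct) is deliberately left out.

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

func main() {
	// Hypothetical request: project "project-1", workflow names matching "podtato",
	// second page of ten results (all values are illustrative).
	projectID, nameRegex := "project-1", "podtato"
	page, limit := 2, 10

	var pipeline mongo.Pipeline
	pipeline = append(pipeline,
		bson.D{{"$match", bson.D{{"project_id", projectID}}}},                        // scope to the project
		bson.D{{"$match", bson.D{{"isRemoved", bson.D{{"$eq", false}}}}}},            // drop removed schedules
		bson.D{{"$match", bson.D{{"workflow_name", bson.D{{"$regex", nameRegex}}}}}}, // optional name filter
	)

	// Sorting and pagination live inside the $facet, so the sibling $count
	// still sees every document that survived the $match stages.
	paginatedWorkflows := bson.A{
		bson.D{{"$sort", bson.D{{"updated_at", -1}}}},
		bson.D{{"$skip", page * limit}}, // page 2, limit 10 -> skip 20 documents
		bson.D{{"$limit", limit}},
	}

	pipeline = append(pipeline, bson.D{{"$facet", bson.D{
		{"total_filtered_workflows", bson.A{bson.D{{"$count", "count"}}}},
		{"scheduled_workflows", paginatedWorkflows},
	}}})

	for _, stage := range pipeline {
		out, err := bson.MarshalExtJSON(stage, false, false)
		if err != nil {
			panic(err)
		}
		fmt.Println(string(out))
	}
}

Keeping $skip and $limit only inside the scheduled_workflows facet is what lets total_no_of_workflows report the pre-pagination count that a caller can use for paging, while the returned page stays bounded by the limit.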
// WorkFlowRunHandler Updates or Inserts a new Workflow Run into the DB

View File

@@ -47,11 +47,11 @@ type ChaosWorkflowRun struct {
}

type AggregatedWorkflowRuns struct {
-    TotalFilteredWorkflowRuns []TotalFilteredWorkflowRuns `bson:"total_filtered_workflow_runs"`
+    TotalFilteredWorkflowRuns []TotalFilteredData    `bson:"total_filtered_workflow_runs"`
    FlattenedWorkflowRuns     []FlattenedWorkflowRun `bson:"flattened_workflow_runs"`
}

-type TotalFilteredWorkflowRuns struct {
+type TotalFilteredData struct {
    Count int `bson:"count"`
}

@@ -72,3 +72,8 @@ type FlattenedWorkflowRun struct {
    WorkflowRuns ChaosWorkflowRun `bson:"workflow_runs"`
    IsRemoved    bool             `bson:"isRemoved"`
}
+
+type AggregatedWorkflows struct {
+    TotalFilteredWorkflows []TotalFilteredData  `bson:"total_filtered_workflows"`
+    ScheduledWorkflows     []ChaosWorkFlowInput `bson:"scheduled_workflows"`
+}
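The new AggregatedWorkflows struct mirrors the two keys produced by the $facet stage in QueryListWorkflow. Below is a small, self-contained sketch of how such a facet document decodes with the mongo-driver; the one-field workflowDoc type stands in for the real ChaosWorkFlowInput purely to keep the example short, and the IDs are invented.

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

// workflowDoc is a stand-in for ChaosWorkFlowInput, trimmed to a single field.
type workflowDoc struct {
	WorkflowID string `bson:"workflow_id"`
}

type totalFilteredData struct {
	Count int `bson:"count"`
}

// Same shape and bson tags as the AggregatedWorkflows struct above.
type aggregatedWorkflows struct {
	TotalFilteredWorkflows []totalFilteredData `bson:"total_filtered_workflows"`
	ScheduledWorkflows     []workflowDoc       `bson:"scheduled_workflows"`
}

func main() {
	// A document shaped like the single result the $facet stage emits.
	facetResult := bson.M{
		"total_filtered_workflows": bson.A{bson.M{"count": 2}},
		"scheduled_workflows": bson.A{
			bson.M{"workflow_id": "wf-1"},
			bson.M{"workflow_id": "wf-2"},
		},
	}

	raw, err := bson.Marshal(facetResult)
	if err != nil {
		panic(err)
	}

	var decoded aggregatedWorkflows
	if err := bson.Unmarshal(raw, &decoded); err != nil {
		panic(err)
	}

	// Prints: 2 2 (total count before pagination, and the size of the returned page).
	fmt.Println(decoded.TotalFilteredWorkflows[0].Count, len(decoded.ScheduledWorkflows))
}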