List workflow optimization (#2882)

* Optimized ListWorkflow query in backend and frontend along with minor style changes

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* generated go.sum

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Added filter to filter out removed workflows

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Added condition to check empty workflows

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>
Author: Sarthak Jain <sarthak@chaosnative.com> (committed via GitHub)
Date: 2021-06-11 13:42:12 +05:30
Parent: 0a7e623356
Commit: ac00e474f3
27 changed files with 1037 additions and 770 deletions
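
For orientation before the per-file diffs: the PR replaces the old ListWorkflow(project_id, workflow_ids) query with a single ListWorkflowsInput argument carrying pagination, sorting and filtering, and wraps the result in ListWorkflowsOutput. A minimal sketch of how the frontend now consumes it is shown below; the query document and type names are taken from the diff, while the hook wrapper and import paths are placeholders.

import { useQuery } from '@apollo/client';
import { WORKFLOW_LIST_DETAILS } from '../graphql/queries'; // path is a placeholder
import {
  ListWorkflowsInput,
  ScheduledWorkflows,
} from '../models/graphql/workflowListData'; // path is a placeholder

// Sketch: fetch one page of schedules for a project, sorted by name,
// letting the server handle pagination and filtering instead of the client.
const useScheduledWorkflows = (projectID: string) =>
  useQuery<ScheduledWorkflows, ListWorkflowsInput>(WORKFLOW_LIST_DETAILS, {
    variables: {
      workflowInput: {
        project_id: projectID,
        pagination: { page: 0, limit: 10 },
        sort: { field: 'Name', descending: false },
        filter: { workflow_name: '' },
      },
    },
    fetchPolicy: 'cache-and-network',
  });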


@ -3845,6 +3845,11 @@
"d3-time": "^2.1.1"
},
"dependencies": {
"@types/d3-time": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-2.1.0.tgz",
"integrity": "sha512-qVCiT93utxN0cawScyQuNx8H82vBvZXSClZfgOu3l3dRRlRO6FjKEZlaPgXG9XUFjIAOsA4kAJY101vobHeJLQ=="
},
"d3-array": {
"version": "2.12.1",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz",


@ -39,7 +39,7 @@
"jsonwebtoken": "^8.5.1",
"jspdf": "^2.1.1",
"jspdf-autotable": "^3.5.13",
"litmus-ui": "^1.1.6",
"litmus-ui": "^1.1.7",
"localforage": "^1.7.3",
"lodash": "^4.17.20",
"moment": "^2.27.0",


@ -40,54 +40,33 @@ export const WORKFLOW_DETAILS = gql`
}
`;
export const SCHEDULE_DETAILS = gql`
query scheduleDetails($projectID: String!) {
getScheduledWorkflows(project_id: $projectID) {
workflow_id
workflow_manifest
cronSyntax
workflow_name
workflow_description
weightages {
experiment_name
weightage
}
isCustomWorkflow
updated_at
created_at
project_id
cluster_id
cluster_type
cluster_name
isRemoved
}
}
`;
export const WORKFLOW_LIST_DETAILS = gql`
query workflowListDetails($projectID: String!, $workflowIDs: [ID]) {
ListWorkflow(project_id: $projectID, workflow_ids: $workflowIDs) {
workflow_id
workflow_manifest
cronSyntax
cluster_name
workflow_name
workflow_description
weightages {
experiment_name
weightage
}
isCustomWorkflow
updated_at
created_at
project_id
cluster_id
cluster_type
isRemoved
workflow_runs {
execution_data
workflow_run_id
last_updated
query workflowListDetails($workflowInput: ListWorkflowsInput!) {
ListWorkflow(workflowInput: $workflowInput) {
total_no_of_workflows
workflows {
workflow_id
workflow_manifest
cronSyntax
cluster_name
workflow_name
workflow_description
weightages {
experiment_name
weightage
}
isCustomWorkflow
updated_at
created_at
project_id
cluster_id
cluster_type
isRemoved
workflow_runs {
execution_data
workflow_run_id
last_updated
}
}
}
}


@ -20,15 +20,6 @@ export interface ScheduleWorkflow {
regularity?: string;
isRemoved: boolean;
}
export interface Schedules {
getScheduledWorkflows: ScheduleWorkflow[];
}
export interface ScheduleDataVars {
projectID: string;
}
export interface DeleteSchedule {
workflow_id: string;
}


@ -51,7 +51,7 @@ export interface WorkflowRun {
workflow_run_id: string;
}
export interface Workflow {
export interface ScheduledWorkflow {
workflow_id: string;
workflow_manifest: string;
cronSyntax: string;
@ -66,11 +66,11 @@ export interface Workflow {
cluster_id: string;
cluster_type: string;
isRemoved: Boolean;
workflow_runs: WorkflowRun[];
workflow_runs?: WorkflowRun[];
}
export interface WorkflowList {
ListWorkflow: Workflow[];
ListWorkflow: ScheduledWorkflow[];
}
export interface WorkflowListDataVars {
@ -87,6 +87,39 @@ export interface ListManifestTemplateArray {
isCustomWorkflow: boolean;
}
export interface Pagination {
page: number;
limit: number;
}
export interface ListManifestTemplate {
ListManifestTemplate: ListManifestTemplateArray[];
}
export interface SortInput {
field: 'Name';
descending?: Boolean;
}
export interface WorkflowFilterInput {
workflow_name?: string;
cluster_name?: string;
}
export interface ListWorkflowsInput {
workflowInput: {
project_id: string;
workflow_ids?: string[];
pagination?: Pagination;
sort?: SortInput;
filter?: WorkflowFilterInput;
};
}
export interface ListWorkflowsOutput {
totalNoOfWorkflows: number;
workflows: ScheduledWorkflow[];
}
export interface ScheduledWorkflows {
ListWorkflow: ListWorkflowsOutput;
}
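
A minimal sketch of how these new interfaces compose (values are placeholders; only the type and field names come from the interfaces above):

// Building a typed input for the ListWorkflow query.
const input: ListWorkflowsInput = {
  workflowInput: {
    project_id: 'example-project-id', // placeholder
    pagination: { page: 0, limit: 10 },
    sort: { field: 'Name', descending: true },
    filter: { workflow_name: 'chaos', cluster_name: 'Self-Agent' }, // placeholder values
  },
};

// The response maps to ScheduledWorkflows, i.e. { ListWorkflow: ListWorkflowsOutput }.
const summarize = (data: ScheduledWorkflows): string => {
  const { totalNoOfWorkflows, workflows } = data.ListWorkflow;
  return `${workflows.length} of ${totalNoOfWorkflows} schedules on this page`;
};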


@ -16,13 +16,16 @@ import YamlEditor from '../../components/YamlEditor/Editor';
import { parseYamlValidations } from '../../components/YamlEditor/Validations';
import Scaffold from '../../containers/layouts/Scaffold';
import { UPDATE_SCHEDULE } from '../../graphql/mutations';
import { SCHEDULE_DETAILS } from '../../graphql/queries';
import { WORKFLOW_LIST_DETAILS } from '../../graphql/queries';
import {
CreateWorkFlowInput,
UpdateWorkflowResponse,
WeightMap,
} from '../../models/graphql/createWorkflowData';
import { ScheduleDataVars, Schedules } from '../../models/graphql/scheduleData';
import {
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import { experimentMap, WorkflowData } from '../../models/redux/workflow';
import useActions from '../../redux/actions';
import * as TabActions from '../../redux/actions/tabs';
@ -78,11 +81,17 @@ const EditSchedule: React.FC = () => {
const projectID = getProjectID();
const userRole = getProjectRole();
// Apollo query to get the scheduled data
const { data, loading } = useQuery<Schedules, ScheduleDataVars>(
SCHEDULE_DETAILS,
const { data, loading } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS,
{
variables: { projectID: paramData.scheduleProjectID },
variables: {
workflowInput: {
project_id: projectID,
filter: {
workflow_name: paramData.workflowName,
},
},
},
fetchPolicy: 'cache-and-network',
}
);
@ -91,11 +100,7 @@ const EditSchedule: React.FC = () => {
(state: RootState) => state.workflowManifest.manifest
);
const wfDetails =
data &&
data.getScheduledWorkflows.filter(
(wf) => wf.workflow_name === paramData.workflowName
)[0];
const wfDetails = data && data.ListWorkflow.workflows[0];
const doc = new YAML.Document();
const w: Weights[] = [];
const { cronSyntax, clusterid, clustername } = workflowData;


@ -12,9 +12,9 @@ import Scaffold from '../../containers/layouts/Scaffold';
import { WORKFLOW_LIST_DETAILS } from '../../graphql';
import { ChaosData, ExecutionData } from '../../models/graphql/workflowData';
import {
ListWorkflowsInput,
ScheduledWorkflows,
WeightageMap,
WorkflowList,
WorkflowListDataVars,
} from '../../models/graphql/workflowListData';
import { getProjectID } from '../../utils/getSearchParams';
import PopOver from '../../views/Analytics/WorkflowDashboard/PopOver';
@ -81,11 +81,13 @@ const AnalyticsPage: React.FC = () => {
const projectID = getProjectID();
// Apollo query to get the scheduled workflow data
const { data, error } = useQuery<WorkflowList, WorkflowListDataVars>(
const { data, error } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS,
{
variables: { projectID, workflowIDs: [] },
pollInterval: 100,
variables: {
workflowInput: { project_id: projectID, workflow_ids: [workflowId] },
},
pollInterval: 5000,
}
);
@ -103,9 +105,7 @@ const AnalyticsPage: React.FC = () => {
const chaosDataArray: ChaosData[] = [];
const validWorkflowRunsData: WorkflowRunData[] = [];
try {
const selectedWorkflowSchedule = data?.ListWorkflow.filter(
(w) => w.workflow_id === workflowId
);
const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
const selectedWorkflows = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs
: [];
@ -233,13 +233,11 @@ const AnalyticsPage: React.FC = () => {
useEffect(() => {
const workflowTestsArray: WorkFlowTests[] = [];
try {
const selectedWorkflowSchedule = data?.ListWorkflow.filter(
(w) => w.workflow_id === workflowId
);
const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
const workflowRuns = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs
: [];
const selectedWorkflows = workflowRuns.filter(
const selectedWorkflows = workflowRuns?.filter(
(w) => w.workflow_run_id === selectedWorkflowRunID
);
selectedWorkflows?.forEach((data) => {
@ -286,9 +284,7 @@ const AnalyticsPage: React.FC = () => {
}, [selectedWorkflowRunID, data]);
// Number of Workflow Runs for the selected Schedule
const selectedWorkflowSchedule = data?.ListWorkflow.filter(
(w) => w.workflow_id === workflowId
);
const selectedWorkflowSchedule = data?.ListWorkflow.workflows;
const workflowRuns = selectedWorkflowSchedule
? selectedWorkflowSchedule[0]?.workflow_runs
: [];
@ -313,7 +309,7 @@ const AnalyticsPage: React.FC = () => {
</div>
<div className={classes.analyticsDiv}>
<WorkflowRunsBarChart
numberOfWorkflowRuns={workflowRuns.length}
numberOfWorkflowRuns={workflowRuns?.length ?? 0}
workflowRunData={workflowRunDataForPlot}
callBackToShowPopOver={setPopOverDisplay}
callBackToSelectWorkflowRun={(


@ -10,15 +10,11 @@ import Loader from '../../components/Loader';
import { StyledTab, TabPanel } from '../../components/Tabs';
import Scaffold from '../../containers/layouts/Scaffold';
import {
SCHEDULE_DETAILS,
WORKFLOW_DETAILS_WITH_EXEC_DATA,
WORKFLOW_EVENTS_WITH_EXEC_DATA,
WORKFLOW_LIST_DETAILS,
} from '../../graphql';
import {
ScheduleDataVars,
Schedules,
ScheduleWorkflow,
} from '../../models/graphql/scheduleData';
import { ScheduleWorkflow } from '../../models/graphql/scheduleData';
import {
ExecutionData,
Workflow,
@ -26,6 +22,10 @@ import {
WorkflowSubscription,
WorkflowSubscriptionInput,
} from '../../models/graphql/workflowData';
import {
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import useActions from '../../redux/actions';
import * as NodeSelectionActions from '../../redux/actions/nodeSelection';
import * as TabActions from '../../redux/actions/tabs';
@ -81,20 +81,24 @@ const WorkflowDetails: React.FC = () => {
}
);
const workflow = data?.getWorkflowRuns.workflow_runs[0];
const workflowRun = data?.getWorkflowRuns.workflow_runs[0];
// Apollo query to get the scheduled data
const { data: SchedulesData, loading } = useQuery<
Schedules,
ScheduleDataVars
>(SCHEDULE_DETAILS, {
variables: { projectID },
const { data: workflowData, loading } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
workflow_ids: [workflowRun?.workflow_id ?? ' '],
},
},
fetchPolicy: 'cache-and-network',
});
// Using subscription to get realtime data
useEffect(() => {
if (workflow?.phase && workflow.phase === 'Running') {
if (workflowRun?.phase && workflowRun.phase === 'Running') {
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
document: WORKFLOW_EVENTS_WITH_EXEC_DATA,
variables: { projectID },
@ -130,17 +134,13 @@ const WorkflowDetails: React.FC = () => {
};
useEffect(() => {
const scheduledWorkflow = SchedulesData?.getScheduledWorkflows.filter(
(schedulesWorkflow) => {
return schedulesWorkflow.workflow_id === workflow?.workflow_id;
}
);
const scheduledWorkflow = workflowData?.ListWorkflow.workflows;
if (scheduledWorkflow) {
setworkflowSchedulesDetails(
(scheduledWorkflow[0] ? scheduledWorkflow[0] : null) as ScheduleWorkflow
);
}
}, [SchedulesData]);
}, [workflowData]);
// On fresh screen refresh 'Workflow' Tab would be selected
useEffect(() => {
@ -149,15 +149,19 @@ const WorkflowDetails: React.FC = () => {
// Setting NodeId of first Node in redux for selection of first node in Argo graph by default
useEffect(() => {
if (workflow && pod_name === '') {
if (workflowRun && pod_name === '') {
if (
Object.keys(JSON.parse(workflow.execution_data as string).nodes).length
Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)
.length
) {
const firstNodeId = JSON.parse(workflow.execution_data as string).nodes[
Object.keys(JSON.parse(workflow.execution_data as string).nodes)[0]
const firstNodeId = JSON.parse(workflowRun.execution_data as string)
.nodes[
Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)[0]
].name;
nodeSelection.selectNode({
...JSON.parse(workflow.execution_data as string).nodes[firstNodeId],
...JSON.parse(workflowRun.execution_data as string).nodes[
firstNodeId
],
pod_name: firstNodeId,
});
} else {
@ -172,11 +176,11 @@ const WorkflowDetails: React.FC = () => {
<div className={classes.button}>
<BackButton />
</div>
{/* If workflow data is present then display the workflow details */}
{workflow && pod_name !== '' && !loading ? (
{/* If workflowRun data is present then display the workflowRun details */}
{workflowRun && pod_name !== '' && !loading ? (
<div>
<Typography data-cy="wfName" className={classes.title}>
{t('workflowDetailsView.headerDesc')} {workflow.workflow_name}
{t('workflowDetailsView.headerDesc')} {workflowRun.workflow_name}
</Typography>
{/* AppBar */}
@ -204,7 +208,8 @@ const WorkflowDetails: React.FC = () => {
{/* Argo Workflow DAG Graph */}
<ArgoWorkflow
nodes={
(JSON.parse(workflow.execution_data) as ExecutionData).nodes
(JSON.parse(workflowRun.execution_data) as ExecutionData)
.nodes
}
setIsInfoToggled={setIsInfoToggled}
/>
@ -212,9 +217,9 @@ const WorkflowDetails: React.FC = () => {
{isInfoToggled ? (
<div>
{pod_name !==
JSON.parse(workflow.execution_data).nodes[
JSON.parse(workflowRun.execution_data).nodes[
Object.keys(
JSON.parse(workflow.execution_data as string).nodes
JSON.parse(workflowRun.execution_data as string).nodes
)[0]
].name ? (
/* Node details and Logs */
@ -223,10 +228,12 @@ const WorkflowDetails: React.FC = () => {
workflowSchedulesDetails?.workflow_manifest as string
}
setIsInfoToggled={setIsInfoToggled}
cluster_id={workflow.cluster_id}
workflow_run_id={workflow.workflow_run_id}
cluster_id={workflowRun.cluster_id}
workflow_run_id={workflowRun.workflow_run_id}
data={
JSON.parse(workflow.execution_data) as ExecutionData
JSON.parse(
workflowRun.execution_data
) as ExecutionData
}
/>
) : (
@ -234,11 +241,13 @@ const WorkflowDetails: React.FC = () => {
<WorkflowInfo
tab={1}
setIsInfoToggled={setIsInfoToggled}
cluster_name={workflow.cluster_name}
cluster_name={workflowRun.cluster_name}
data={
JSON.parse(workflow.execution_data) as ExecutionData
JSON.parse(
workflowRun.execution_data
) as ExecutionData
}
resiliency_score={workflow.resiliency_score}
resiliency_score={workflowRun.resiliency_score}
/>
)}
</div>
@ -249,24 +258,24 @@ const WorkflowDetails: React.FC = () => {
{/* Workflow Info */}
<WorkflowInfo
tab={2}
cluster_name={workflow.cluster_name}
data={JSON.parse(workflow.execution_data) as ExecutionData}
resiliency_score={workflow.resiliency_score}
cluster_name={workflowRun.cluster_name}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
resiliency_score={workflowRun.resiliency_score}
/>
{/* Table for all Node details */}
<NodeTable
manifest={workflowSchedulesDetails?.workflow_manifest as string}
data={JSON.parse(workflow.execution_data) as ExecutionData}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
handleClose={() => setLogsModalOpen(true)}
/>
{/* Modal for viewing logs of a node */}
<NodeLogsModal
logsOpen={logsModalOpen}
handleClose={() => setLogsModalOpen(false)}
cluster_id={workflow.cluster_id}
workflow_run_id={workflow.workflow_run_id}
data={JSON.parse(workflow.execution_data) as ExecutionData}
workflow_name={workflow.workflow_name}
cluster_id={workflowRun.cluster_id}
workflow_run_id={workflowRun.workflow_run_id}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
workflow_name={workflowRun.workflow_name}
/>
</TabPanel>
</div>


@ -9,7 +9,7 @@ import {
} from '@material-ui/core';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { Workflow } from '../../../../models/graphql/workflowListData';
import { ScheduledWorkflow } from '../../../../models/graphql/workflowListData';
import useActions from '../../../../redux/actions';
import * as TabActions from '../../../../redux/actions/tabs';
import { history } from '../../../../redux/configureStore';
@ -22,11 +22,13 @@ import { GetTimeDiff } from '../../../../utils/timeDifferenceString';
import useStyles from '../styles';
interface TableScheduleWorkflow {
scheduleWorkflowList: Workflow[] | undefined;
scheduleWorkflowList: ScheduledWorkflow[];
totalNoOfWorkflows: number;
}
const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
scheduleWorkflowList,
totalNoOfWorkflows,
}) => {
const classes = useStyles();
const { t } = useTranslation();
@ -37,13 +39,13 @@ const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
return (
<div>
{scheduleWorkflowList && scheduleWorkflowList.length > 0 ? (
{scheduleWorkflowList.length > 0 ? (
<Paper className={classes.dataTable}>
<div className={classes.tableHeading}>
<Typography variant="h4" className={classes.weightedHeading}>
{t('analyticsDashboard.workflowScheduleTable.title')}
</Typography>
{scheduleWorkflowList.length > 3 ? (
{totalNoOfWorkflows > 3 ? (
<IconButton
className={classes.seeAllArrowBtn}
onClick={() => {
@ -65,7 +67,7 @@ const TableScheduleWorkflow: React.FC<TableScheduleWorkflow> = ({
</div>
<Table className={classes.tableStyling}>
<TableBody>
{scheduleWorkflowList.slice(0, 3).map((schedule) => (
{scheduleWorkflowList.map((schedule) => (
<TableRow
key={schedule.workflow_id}
className={classes.tableRow}


@ -1,7 +1,3 @@
/* eslint-disable prefer-destructuring */
/* eslint-disable no-unused-expressions */
/* eslint-disable no-return-assign */
import { useQuery } from '@apollo/client';
import React from 'react';
import { LocalQuickActionCard } from '../../../components/LocalQuickActionCard';
@ -12,7 +8,6 @@ import {
} from '../../../graphql/queries';
import {
DashboardList,
ListDashboardResponse,
ListDashboardVars,
} from '../../../models/graphql/dashboardsDetails';
import {
@ -21,9 +16,8 @@ import {
ListDataSourceVars,
} from '../../../models/graphql/dataSourceDetails';
import {
Workflow,
WorkflowList,
WorkflowListDataVars,
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../../models/graphql/workflowListData';
import { getProjectID } from '../../../utils/getSearchParams';
import { sortNumAsc } from '../../../utils/sort';
@ -40,25 +34,26 @@ const Overview: React.FC = () => {
const projectID = getProjectID();
// Apollo query to get the scheduled workflow data
const { data: schedulesData } = useQuery<WorkflowList, WorkflowListDataVars>(
WORKFLOW_LIST_DETAILS,
{
variables: {
projectID,
workflowIDs: [],
const { data: schedulesData } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
pagination: {
page: 0,
limit: 3,
},
},
fetchPolicy: 'cache-and-network',
pollInterval: 10000,
}
);
},
fetchPolicy: 'cache-and-network',
pollInterval: 10000,
});
const filteredScheduleData = schedulesData?.ListWorkflow.slice().sort(
(a: Workflow, b: Workflow) => {
const x = parseInt(a.updated_at, 10);
const y = parseInt(b.updated_at, 10);
return sortNumAsc(y, x);
}
);
const filteredScheduleData = schedulesData?.ListWorkflow.workflows;
const totalScheduledWorkflows =
schedulesData?.ListWorkflow.totalNoOfWorkflows;
// Apollo query to get the dashboard data
const { data: dashboardsList } = useQuery<DashboardList, ListDashboardVars>(
@ -73,13 +68,11 @@ const Overview: React.FC = () => {
);
const filteredDashboardData = dashboardsList?.ListDashboard
? dashboardsList?.ListDashboard.slice().sort(
(a: ListDashboardResponse, b: ListDashboardResponse) => {
const x = parseInt(a.updated_at, 10);
const y = parseInt(b.updated_at, 10);
return sortNumAsc(y, x);
}
)
? dashboardsList?.ListDashboard.slice().sort((a, b) => {
const x = parseInt(a.updated_at, 10);
const y = parseInt(b.updated_at, 10);
return sortNumAsc(y, x);
})
: [];
// Query for dataSource
const { data } = useQuery<DataSourceList, ListDataSourceVars>(
@ -117,7 +110,10 @@ const Overview: React.FC = () => {
))}
<TableDataSource dataSourceList={filteredDataSourceData} />
<TableDashboardData dashboardDataList={filteredDashboardData} />
<TableScheduleWorkflow scheduleWorkflowList={filteredScheduleData} />
<TableScheduleWorkflow
scheduleWorkflowList={filteredScheduleData ?? []}
totalNoOfWorkflows={totalScheduledWorkflows ?? 0}
/>
{((filteredScheduleData && filteredScheduleData.length === 0) ||
!filteredScheduleData) && (


@ -5,7 +5,7 @@ import moment from 'moment';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { CheckBox } from '../../../../components/CheckBox';
import { Workflow } from '../../../../models/graphql/workflowListData';
import { ScheduledWorkflow } from '../../../../models/graphql/workflowListData';
import { history } from '../../../../redux/configureStore';
import {
getProjectID,
@ -14,7 +14,7 @@ import {
import useStyles, { StyledTableCell } from './styles';
interface TableDataProps {
data: Workflow;
data: ScheduledWorkflow;
itemSelectionStatus: boolean;
labelIdentifier: string;
comparisonState: Boolean;


@ -25,10 +25,10 @@ import Loader from '../../../../components/Loader';
import { WORKFLOW_LIST_DETAILS } from '../../../../graphql/queries';
import {
ExecutionData,
ListWorkflowsInput,
ScheduledWorkflow,
ScheduledWorkflows,
WeightageMap,
Workflow,
WorkflowList,
WorkflowListDataVars,
} from '../../../../models/graphql/workflowListData';
import { getProjectID } from '../../../../utils/getSearchParams';
import {
@ -104,7 +104,7 @@ const WorkflowComparisonTable = () => {
},
searchTokens: [''],
});
const [displayData, setDisplayData] = useState<Workflow[]>([]);
const [displayData, setDisplayData] = useState<ScheduledWorkflow[]>([]);
const [clusters, setClusters] = React.useState<string[]>([]);
const [page, setPage] = React.useState(0);
const [rowsPerPage, setRowsPerPage] = React.useState(5);
@ -127,15 +127,15 @@ const WorkflowComparisonTable = () => {
const projectID = getProjectID();
// Apollo query to get the scheduled workflow data
const { data, loading, error } = useQuery<WorkflowList, WorkflowListDataVars>(
WORKFLOW_LIST_DETAILS,
{
variables: { projectID, workflowIDs: [] },
fetchPolicy: 'cache-and-network',
}
);
const { data, loading, error } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: { workflowInput: { project_id: projectID } },
fetchPolicy: 'cache-and-network',
});
const getClusters = (searchingData: Workflow[]) => {
const getClusters = (searchingData: ScheduledWorkflow[]) => {
const uniqueList: string[] = [];
searchingData.forEach((data) => {
if (!uniqueList.includes(data.cluster_name)) {
@ -158,7 +158,9 @@ const WorkflowComparisonTable = () => {
const handleSelectAllClick = (event: React.ChangeEvent<HTMLInputElement>) => {
if (event.target.checked) {
const newSelecteds = displayData.map((n: Workflow) => n.workflow_id);
const newSelecteds = displayData.map(
(n: ScheduledWorkflow) => n.workflow_id
);
setSelected(newSelecteds);
return;
}
@ -185,13 +187,13 @@ const WorkflowComparisonTable = () => {
};
const searchingDataRetriever = () => {
let searchingData: Workflow[] = [];
let searchingData: ScheduledWorkflow[] = [];
if (compare === false) {
searchingData = data?.ListWorkflow ?? [];
searchingData = data?.ListWorkflow.workflows ?? [];
} else {
const searchedData: Workflow[] = [];
const searchedData: ScheduledWorkflow[] = [];
selected.forEach((workflowID) => {
data?.ListWorkflow.forEach((workflow) => {
data?.ListWorkflow.workflows.forEach((workflow) => {
if (workflow.workflow_id === workflowID) {
searchedData.push(workflow);
}
@ -230,14 +232,16 @@ const WorkflowComparisonTable = () => {
const totalValidWorkflowRuns: WorkflowDataForExport[] = [];
const timeSeriesArray: DatedResilienceScore[][] = [];
selected.forEach((workflow) => {
const workflowData = data?.ListWorkflow.filter(function match(wkf) {
const workflowData = data?.ListWorkflow.workflows.filter(function match(
wkf
) {
return wkf.workflow_id === workflow;
});
const runs = workflowData ? workflowData[0].workflow_runs : [];
const workflowTimeSeriesData: DatedResilienceScore[] = [];
let isWorkflowValid: boolean = false;
try {
runs.forEach((data) => {
runs?.forEach((data) => {
try {
const executionData: ExecutionData = JSON.parse(
data.execution_data
@ -416,11 +420,11 @@ const WorkflowComparisonTable = () => {
const CallbackForComparing = (compareWorkflows: boolean) => {
setCompare(compareWorkflows);
const payload: Workflow[] = [];
const payload: ScheduledWorkflow[] = [];
selected.forEach((workflow) => {
displayData.forEach((displayWorkflow, i) => {
if (displayWorkflow.workflow_id === workflow && data) {
payload.push(data.ListWorkflow[i]);
payload.push(data?.ListWorkflow.workflows[i]);
}
});
});
@ -554,13 +558,13 @@ const WorkflowComparisonTable = () => {
};
useEffect(() => {
setDisplayData(data ? data.ListWorkflow : []);
getClusters(data ? data.ListWorkflow : []);
setDisplayData(data ? data.ListWorkflow.workflows : []);
getClusters(data ? data.ListWorkflow.workflows : []);
}, [data]);
useEffect(() => {
const payload = searchingDataRetriever()
.filter((wkf: Workflow) => {
.filter((wkf) => {
return filter.searchTokens.every(
(s: string) =>
wkf.workflow_name.toLowerCase().includes(s) ||
@ -589,7 +593,7 @@ const WorkflowComparisonTable = () => {
)
).getTime();
})
.sort((a: Workflow, b: Workflow) => {
.sort((a, b) => {
// Sorting based on unique fields
if (filter.sortData.name.sort) {
const x = a.workflow_name;
@ -752,7 +756,7 @@ const WorkflowComparisonTable = () => {
page * rowsPerPage,
page * rowsPerPage + rowsPerPage
)
.map((data: Workflow, index: number) => {
.map((data, index) => {
const isItemSelected = isSelected(data.workflow_id);
const labelId = `enhanced-table-checkbox-${index}`;
return (
@ -799,7 +803,7 @@ const WorkflowComparisonTable = () => {
</Table>
</TableContainer>
{/* </MuiThemeProvider> */}
{compare === false || showAll === true ? (
{!compare || showAll ? (
<TablePagination
rowsPerPageOptions={[5, 10, 25, 50]}
component="div"


@ -22,7 +22,7 @@ import React from 'react';
import { useTranslation } from 'react-i18next';
import YAML from 'yaml';
import { RERUN_CHAOS_WORKFLOW } from '../../../graphql/mutations';
import { ScheduleWorkflow } from '../../../models/graphql/scheduleData';
import { ScheduledWorkflow } from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions';
import * as TabActions from '../../../redux/actions/tabs';
import * as WorkflowActions from '../../../redux/actions/workflow';
@ -35,9 +35,9 @@ import SaveTemplateModal from './SaveTemplateModal';
import useStyles from './styles';
interface TableDataProps {
data: ScheduleWorkflow;
data: ScheduledWorkflow;
deleteRow: (wfid: string) => void;
handleToggleSchedule: (schedule: ScheduleWorkflow) => void;
handleToggleSchedule: (schedule: ScheduledWorkflow) => void;
}
const TableData: React.FC<TableDataProps> = ({


@ -26,38 +26,27 @@ import YAML from 'yaml';
import Loader from '../../../components/Loader';
import {
DELETE_WORKFLOW,
SCHEDULE_DETAILS,
GET_CLUSTER_NAMES,
UPDATE_SCHEDULE,
WORKFLOW_LIST_DETAILS,
} from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import { WeightMap } from '../../../models/graphql/createWorkflowData';
import { DeleteSchedule } from '../../../models/graphql/scheduleData';
import {
DeleteSchedule,
ScheduleDataVars,
Schedules,
ScheduleWorkflow,
} from '../../../models/graphql/scheduleData';
ListWorkflowsInput,
Pagination,
ScheduledWorkflow,
ScheduledWorkflows,
SortInput,
WorkflowFilterInput,
} from '../../../models/graphql/workflowListData';
import { getProjectID } from '../../../utils/getSearchParams';
import {
sortAlphaAsc,
sortAlphaDesc,
sortNumAsc,
sortNumDesc,
} from '../../../utils/sort';
import useStyles from './styles';
import TableData from './TableData';
interface FilterOption {
search: string;
cluster: string;
suspended: string;
}
interface PaginationData {
pageNo: number;
rowsPerPage: number;
}
interface SortData {
startDate: { sort: boolean; ascending: boolean };
name: { sort: boolean; ascending: boolean };
interface FilterOption extends WorkflowFilterInput {
suspended?: string;
}
const BrowseSchedule: React.FC = () => {
@ -65,33 +54,60 @@ const BrowseSchedule: React.FC = () => {
const projectID = getProjectID();
const { t } = useTranslation();
// Apollo query to get the scheduled data
const { data, loading, error } = useQuery<Schedules, ScheduleDataVars>(
SCHEDULE_DETAILS,
{
variables: { projectID },
fetchPolicy: 'cache-and-network',
}
);
// Apollo mutation to delete the selected schedule
const [deleteSchedule] = useMutation<DeleteSchedule>(DELETE_WORKFLOW, {
refetchQueries: [{ query: SCHEDULE_DETAILS, variables: { projectID } }],
// State for pagination
const [paginationData, setPaginationData] = useState<Pagination>({
page: 0,
limit: 10,
});
// State for search and filtering
const [filter, setFilter] = React.useState<FilterOption>({
search: '',
cluster: 'All',
// States for filters
const [filters, setFilters] = useState<FilterOption>({
workflow_name: '',
cluster_name: 'All',
suspended: 'All',
});
// State for sorting
const [sortData, setSortData] = useState<SortInput>({
field: 'Name',
descending: true,
});
// Apollo query to get the scheduled data
const { data, refetch, loading, error } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
pagination: {
page: paginationData.page,
limit: paginationData.limit,
},
sort: sortData,
filter: {
workflow_name: filters.workflow_name,
cluster_name: filters.cluster_name,
},
},
},
fetchPolicy: 'cache-and-network',
});
// Apollo mutation to delete the selected schedule
const [deleteSchedule] = useMutation<DeleteSchedule>(DELETE_WORKFLOW, {
onCompleted: () => refetch(),
});
// State for search and filtering
const [updateSchedule] = useMutation(UPDATE_SCHEDULE, {
refetchQueries: [{ query: SCHEDULE_DETAILS, variables: { projectID } }],
onCompleted: () => refetch(),
});
// Disable and re-enable a schedule
const handleToggleSchedule = (schedule: ScheduleWorkflow) => {
const handleToggleSchedule = (schedule: ScheduledWorkflow) => {
const yaml = YAML.parse(schedule.workflow_manifest);
if (yaml.spec.suspend === undefined || yaml.spec.suspend === false) {
yaml.spec.suspend = true;
@ -125,66 +141,25 @@ const BrowseSchedule: React.FC = () => {
});
};
// State for pagination
const [paginationData, setPaginationData] = useState<PaginationData>({
pageNo: 0,
rowsPerPage: 5,
});
// Query to get list of Clusters
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
GET_CLUSTER_NAMES,
{
variables: {
project_id: projectID,
},
}
);
// State for sorting
const [sortData, setSortData] = useState<SortData>({
name: { sort: false, ascending: true },
startDate: { sort: true, ascending: true },
});
const getClusters = (searchingData: ScheduleWorkflow[]) => {
const uniqueList: string[] = [];
searchingData.forEach((data) => {
if (!uniqueList.includes(data.cluster_name)) {
uniqueList.push(data.cluster_name);
}
});
return uniqueList;
};
const filteredData = data?.getScheduledWorkflows
.filter((dataRow) =>
dataRow.workflow_name.toLowerCase().includes(filter.search.toLowerCase())
)
.filter((dataRow) =>
filter.cluster === 'All'
? true
: dataRow.cluster_name
.toLowerCase()
.includes(filter.cluster.toLowerCase())
)
.filter((dataRow) =>
filter.suspended === 'All'
? true
: filter.suspended === 'true'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === true
: filter.suspended === 'false'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === undefined
: false
)
.sort((a: ScheduleWorkflow, b: ScheduleWorkflow) => {
// Sorting based on unique fields
if (sortData.name.sort) {
const x = a.workflow_name;
const y = b.workflow_name;
return sortData.name.ascending
? sortAlphaAsc(x, y)
: sortAlphaDesc(x, y);
}
if (sortData.startDate.sort) {
const x = parseInt(a.updated_at, 10);
const y = parseInt(b.updated_at, 10);
return sortData.startDate.ascending
? sortNumAsc(y, x)
: sortNumDesc(y, x);
}
return 0;
});
const filteredWorkflows = data?.ListWorkflow.workflows.filter((dataRow) =>
filters.suspended === 'All'
? true
: filters.suspended === 'true'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === true
: filters.suspended === 'false'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === undefined
: false
);
const deleteRow = (wfid: string) => {
deleteSchedule({
@ -200,9 +175,12 @@ const BrowseSchedule: React.FC = () => {
id="input-with-icon-adornment"
placeholder="Search"
className={classes.search}
value={filter.search}
value={filters.workflow_name}
onChange={(event) =>
setFilter({ ...filter, search: event.target.value as string })
setFilters({
...filters,
workflow_name: event.target.value as string,
})
}
startAdornment={
<InputAdornment position="start">
@ -218,10 +196,10 @@ const BrowseSchedule: React.FC = () => {
>
<InputLabel className={classes.selectText}>Name</InputLabel>
<Select
value={filter.suspended}
value={filters.suspended}
onChange={(event) =>
setFilter({
...filter,
setFilters({
...filters,
suspended: event.target.value as string,
})
}
@ -249,19 +227,25 @@ const BrowseSchedule: React.FC = () => {
>
<InputLabel className={classes.selectText}>Target Agent</InputLabel>
<Select
value={filter.cluster}
value={filters.cluster_name}
onChange={(event) =>
setFilter({ ...filter, cluster: event.target.value as string })
setFilters({
...filters,
cluster_name: event.target.value as string,
})
}
label="Target Cluster"
className={classes.selectText}
>
<MenuItem value="All">All</MenuItem>
{(data ? getClusters(data.getScheduledWorkflows) : []).map(
(cluster: any) => (
<MenuItem value={cluster}>{cluster}</MenuItem>
)
)}
{clusterList?.getCluster?.map((cluster) => (
<MenuItem
key={cluster.cluster_name}
value={cluster.cluster_name}
>
{cluster.cluster_name}
</MenuItem>
))}
</Select>
</FormControl>
</div>
@ -287,9 +271,8 @@ const BrowseSchedule: React.FC = () => {
size="small"
onClick={() =>
setSortData({
...sortData,
name: { sort: false, ascending: false },
startDate: { sort: false, ascending: false },
field: 'Name',
descending: false,
})
}
>
@ -300,9 +283,8 @@ const BrowseSchedule: React.FC = () => {
size="small"
onClick={() =>
setSortData({
...sortData,
name: { sort: false, ascending: true },
startDate: { sort: true, ascending: true },
field: 'Name',
descending: true,
})
}
>
@ -356,25 +338,19 @@ const BrowseSchedule: React.FC = () => {
<Typography align="center">Unable to fetch data</Typography>
</TableCell>
</TableRow>
) : filteredData && filteredData.length ? (
filteredData
.slice(
paginationData.pageNo * paginationData.rowsPerPage,
paginationData.pageNo * paginationData.rowsPerPage +
paginationData.rowsPerPage
)
.map((data: ScheduleWorkflow) => (
<TableRow
data-cy="workflowSchedulesTableRow"
key={data.workflow_id}
>
<TableData
data={data}
deleteRow={deleteRow}
handleToggleSchedule={handleToggleSchedule}
/>
</TableRow>
))
) : filteredWorkflows && filteredWorkflows.length ? (
filteredWorkflows.map((data) => (
<TableRow
data-cy="workflowSchedulesTableRow"
key={data.workflow_id}
>
<TableData
data={data}
deleteRow={deleteRow}
handleToggleSchedule={handleToggleSchedule}
/>
</TableRow>
))
) : (
<TableRow>
<TableCell data-cy="browseScheduleNoData" colSpan={7}>
@ -388,19 +364,19 @@ const BrowseSchedule: React.FC = () => {
{/* Pagination Section */}
<TablePagination
rowsPerPageOptions={[5, 10, 25]}
rowsPerPageOptions={[10, 25, 50]}
component="div"
count={filteredData?.length ?? 0}
rowsPerPage={paginationData.rowsPerPage}
page={paginationData.pageNo}
count={filteredWorkflows?.length ?? 0}
rowsPerPage={paginationData.limit}
page={paginationData.page}
onChangePage={(_, page) =>
setPaginationData({ ...paginationData, pageNo: page })
setPaginationData({ ...paginationData, page })
}
onChangeRowsPerPage={(event) => {
setPaginationData({
...paginationData,
pageNo: 0,
rowsPerPage: parseInt(event.target.value, 10),
page: 0,
limit: parseInt(event.target.value, 10),
});
}}
/>
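
Because the Pagination, SortInput and WorkflowFilterInput state above is passed straight into the useQuery variables, any page, sort or filter change re-runs the query on the server rather than slicing a cached list on the client. A condensed, illustrative sketch of that wiring follows; import paths are placeholders, and note that the diff passes the current page's filtered length as count, whereas the server-reported total used here is the other natural pairing with server-side paging.

import React, { useState } from 'react';
import { useQuery } from '@apollo/client';
import { TablePagination } from '@material-ui/core';
import { WORKFLOW_LIST_DETAILS } from '../../../graphql'; // path is a placeholder
import {
  ListWorkflowsInput,
  Pagination,
  ScheduledWorkflows,
} from '../../../models/graphql/workflowListData'; // path is a placeholder

const SchedulePagination: React.FC<{ projectID: string }> = ({ projectID }) => {
  const [paginationData, setPaginationData] = useState<Pagination>({ page: 0, limit: 10 });

  // The pagination state is part of the query variables, so Apollo refetches
  // the requested page from the server whenever it changes.
  const { data } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(WORKFLOW_LIST_DETAILS, {
    variables: { workflowInput: { project_id: projectID, pagination: paginationData } },
    fetchPolicy: 'cache-and-network',
  });

  return (
    <TablePagination
      rowsPerPageOptions={[10, 25, 50]}
      component="div"
      count={data?.ListWorkflow.totalNoOfWorkflows ?? 0}
      page={paginationData.page}
      rowsPerPage={paginationData.limit}
      onChangePage={(_, page) => setPaginationData((prev) => ({ ...prev, page }))}
      onChangeRowsPerPage={(event) =>
        setPaginationData({ page: 0, limit: parseInt(event.target.value, 10) })
      }
    />
  );
};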


@ -22,8 +22,8 @@ import {
} from '../../../graphql';
import { WorkflowRun } from '../../../models/graphql/workflowData';
import {
WorkflowList,
WorkflowListDataVars,
ListWorkflowsInput,
ScheduledWorkflows,
} from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions';
import * as NodeSelectionActions from '../../../redux/actions/nodeSelection';
@ -62,12 +62,14 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
};
const { data: scheduledWorkflowData } = useQuery<
WorkflowList,
WorkflowListDataVars
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
variables: {
projectID,
workflowIDs: [data.workflow_id as string],
workflowInput: {
project_id: projectID,
workflow_ids: [data.workflow_id ?? ''],
},
},
});
@ -298,7 +300,11 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
>
<Typography className={classes.boldText}>
{t('chaosWorkflows.browseWorkflows.tableData.showExperiments')}(
{scheduledWorkflowData?.ListWorkflow[0].weightages.length})
{
scheduledWorkflowData?.ListWorkflow.workflows[0].weightages
.length
}
)
</Typography>
<div className={classes.experimentDetails}>
{isOpen ? (
@ -323,7 +329,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
}}
>
<div className={classes.popover}>
{scheduledWorkflowData?.ListWorkflow[0].weightages.map(
{scheduledWorkflowData?.ListWorkflow.workflows[0].weightages.map(
(weightEntry) => (
<div
key={weightEntry.experiment_name}


@ -136,11 +136,13 @@ const useStyles = makeStyles((theme) => ({
flexDirection: 'row',
cursor: 'pointer',
},
btnImg: {
width: '0.8125rem',
height: '0.8125rem',
marginTop: theme.spacing(0.375),
},
btnText: {
paddingLeft: theme.spacing(1.625),
},


@ -28,8 +28,8 @@ const useStyles = makeStyles((theme) => ({
backgroundColor: theme.palette.warning.light,
},
failed: {
color: theme.palette.error.main,
backgroundColor: theme.palette.error.light,
color: theme.palette.status.failed,
backgroundColor: theme.palette.status.failed,
},
statusFont: {
fontSize: '0.725rem',


@ -397,7 +397,7 @@ const VerifyCommit = forwardRef(
fullWidth
multiline
error={checkNameValidation()}
onSave={(value) =>
onSave={(value: any) =>
handleNameChange({ changedName: value })
}
helperText={
@ -436,7 +436,7 @@ const VerifyCommit = forwardRef(
id="desc"
fullWidth
multiline
onSave={(value) =>
onSave={(value: any) =>
handleDescChange({ changedDesc: value })
}
/>
@ -469,7 +469,7 @@ const VerifyCommit = forwardRef(
fullWidth
multiline
error={checkSubjectValidation()}
onSave={(value) =>
onSave={(value: any) =>
handleSubjectChange({ changedSubject: value })
}
helperText={


@ -27,6 +27,7 @@ require (
go.mongodb.org/mongo-driver v1.3.5
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect
golang.org/x/tools v0.0.0-20200428211428-0c9eba77bc32
gopkg.in/src-d/go-git.v4 v4.13.1
gopkg.in/yaml.v2 v2.3.0
k8s.io/apimachinery v0.18.6


@ -643,6 +643,8 @@ github.com/litmuschaos/chaos-operator v0.0.0-20210224131102-ca6a465ed348/go.mod
github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032 h1:Nza94oOqOsao8eFWC19iFviS8XsxS2eVk7Q0a9WDKBE=
github.com/litmuschaos/chaos-scheduler v0.0.0-20210607090343-9952190ad032/go.mod h1:7EO6kbZKeJGKzkchgQepCxywvqNFNvNHW0G+u9923AY=
github.com/litmuschaos/elves v0.0.0-20201107015738-552d74669e3c/go.mod h1:DsbHGNUq/78NZozWVVI9Q6eBei4I+JjlkkD5aibJ3MQ=
github.com/litmuschaos/litmus v0.0.0-20210610061227-c0d001df3f33 h1:TdJzS++HpQWypGHPXyUGSQzN7K5eajy9/K34UQLVSBw=
github.com/litmuschaos/litmus v0.0.0-20210610070956-555e651c89ea h1:nWDzJZvpiJc37yKO456Cv9TRukS5PoeO/pSmRtxVb6A=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/lpabon/godbc v0.1.1/go.mod h1:Jo9QV0cf3U6jZABgiJ2skINAXb9j8m51r07g4KI92ZA=
github.com/lucas-clemente/aes12 v0.0.0-20171027163421-cd47fb39b79f/go.mod h1:JpH9J1c9oX6otFSgdUHwUBUizmKlrMjxWnIAjff4m04=


@ -186,6 +186,11 @@ type ComplexityRoot struct {
URL func(childComplexity int) int
}
ListWorkflowsOutput struct {
TotalNoOfWorkflows func(childComplexity int) int
Workflows func(childComplexity int) int
}
Maintainer struct {
Email func(childComplexity int) int
Name func(childComplexity int) int
@ -340,7 +345,6 @@ type ComplexityRoot struct {
GetPromLabelNamesAndValues func(childComplexity int, series *model.PromSeriesInput) int
GetPromQuery func(childComplexity int, query *model.PromInput) int
GetPromSeriesList func(childComplexity int, dsDetails *model.DsDetails) int
GetScheduledWorkflows func(childComplexity int, projectID string) int
GetTemplateManifestByID func(childComplexity int, templateID string) int
GetUser func(childComplexity int, username string) int
GetWorkflowRuns func(childComplexity int, workflowRunsInput model.GetWorkflowRunsInput) int
@ -350,7 +354,7 @@ type ComplexityRoot struct {
ListImageRegistry func(childComplexity int, projectID string) int
ListManifestTemplate func(childComplexity int, projectID string) int
ListProjects func(childComplexity int) int
ListWorkflow func(childComplexity int, projectID string, workflowIds []*string) int
ListWorkflow func(childComplexity int, workflowInput model.ListWorkflowsInput) int
Users func(childComplexity int) int
}
@ -431,7 +435,6 @@ type ComplexityRoot struct {
WorkflowManifest func(childComplexity int) int
WorkflowName func(childComplexity int) int
WorkflowRuns func(childComplexity int) int
WorkflowType func(childComplexity int) int
}
WorkflowRun struct {
@ -626,8 +629,7 @@ type QueryResolver interface {
GetProject(ctx context.Context, projectID string) (*model.Project, error)
ListProjects(ctx context.Context) ([]*model.Project, error)
Users(ctx context.Context) ([]*model.User, error)
GetScheduledWorkflows(ctx context.Context, projectID string) ([]*model.ScheduledWorkflows, error)
ListWorkflow(ctx context.Context, projectID string, workflowIds []*string) ([]*model.Workflow, error)
ListWorkflow(ctx context.Context, workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error)
GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error)
GetHubExperiment(ctx context.Context, experimentInput model.ExperimentInput) (*model.Chart, error)
GetHubStatus(ctx context.Context, projectID string) ([]*model.MyHubStatus, error)
@ -1305,6 +1307,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Link.URL(childComplexity), true
case "ListWorkflowsOutput.total_no_of_workflows":
if e.complexity.ListWorkflowsOutput.TotalNoOfWorkflows == nil {
break
}
return e.complexity.ListWorkflowsOutput.TotalNoOfWorkflows(childComplexity), true
case "ListWorkflowsOutput.workflows":
if e.complexity.ListWorkflowsOutput.Workflows == nil {
break
}
return e.complexity.ListWorkflowsOutput.Workflows(childComplexity), true
case "Maintainer.Email":
if e.complexity.Maintainer.Email == nil {
break
@ -2415,18 +2431,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Query.GetPromSeriesList(childComplexity, args["ds_details"].(*model.DsDetails)), true
case "Query.getScheduledWorkflows":
if e.complexity.Query.GetScheduledWorkflows == nil {
break
}
args, err := ec.field_Query_getScheduledWorkflows_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.GetScheduledWorkflows(childComplexity, args["project_id"].(string)), true
case "Query.GetTemplateManifestByID":
if e.complexity.Query.GetTemplateManifestByID == nil {
break
@ -2540,7 +2544,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return 0, false
}
return e.complexity.Query.ListWorkflow(childComplexity, args["project_id"].(string), args["workflow_ids"].([]*string)), true
return e.complexity.Query.ListWorkflow(childComplexity, args["workflowInput"].(model.ListWorkflowsInput)), true
case "Query.users":
if e.complexity.Query.Users == nil {
@ -3001,13 +3005,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.Workflow.WorkflowRuns(childComplexity), true
case "Workflow.workflow_type":
if e.complexity.Workflow.WorkflowType == nil {
break
}
return e.complexity.Workflow.WorkflowType(childComplexity), true
case "WorkflowRun.cluster_id":
if e.complexity.WorkflowRun.ClusterID == nil {
break
@ -4269,7 +4266,7 @@ input PodLogRequest {
}
type ScheduledWorkflows {
workflow_type:String!
workflow_type: String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
@ -4286,25 +4283,6 @@ type ScheduledWorkflows {
isRemoved: Boolean!
}
type Workflow {
workflow_type:String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type WorkflowRuns {
execution_data: String!
workflow_run_id: ID!
@ -4404,10 +4382,8 @@ type Query {
users: [User!]! @authorized
# [Deprecated soon]
getScheduledWorkflows(project_id: String!): [ScheduledWorkflows]! @authorized
ListWorkflow(project_id: String!, workflow_ids: [ID]): [Workflow]! @authorized
ListWorkflow(workflowInput: ListWorkflowsInput!): ListWorkflowsOutput!
@authorized
getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized
@ -4425,7 +4401,8 @@ type Query {
GetPromQuery(query: promInput): promResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse!
@authorized
GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized
@ -4461,13 +4438,16 @@ type Mutation {
## Workflow APIs
# It is used to create chaosworkflow
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse!
@authorized
reRunChaosWorkFlow(workflowID: String!): String! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean!
@authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean! @authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean!
@authorized
#Used for sending invitation
sendInvitation(member: MemberInput!): Member @authorized
@ -4485,7 +4465,8 @@ type Mutation {
leaveProject(member: MemberInput!): String! @authorized
#Used to update project name
updateProjectName(projectID: String!, projectName: String!): String! @authorized
updateProjectName(projectID: String!, projectName: String!): String!
@authorized
#It is used to confirm the subscriber registration
clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse!
@ -4505,7 +4486,8 @@ type Mutation {
syncHub(id: ID!): [MyHubStatus!]! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse!
@authorized
deleteClusterReg(cluster_id: String!): String! @authorized
@ -4540,7 +4522,8 @@ type Mutation {
deleteDataSource(input: deleteDSInput!): Boolean! @authorized
# Manifest Template
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate! @authorized
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate!
@authorized
deleteManifestTemplate(template_id: String!): Boolean! @authorized
@ -4556,7 +4539,8 @@ type Mutation {
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String!
@authorized
}
type Subscription {
@ -4570,7 +4554,8 @@ type Subscription {
#It is used to listen cluster operation request from the graphql server
clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction!
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse! @authorized
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse!
@authorized
}
`, BuiltIn: false},
&ast.Source{Name: "graph/usermanagement.graphqls", Input: `type User {
@ -4633,7 +4618,7 @@ enum WorkflowRunSortingField {
Time
}
input SortInput {
input WorkflowRunSortInput {
field: WorkflowRunSortingField!
descending: Boolean
}
@ -4642,7 +4627,7 @@ input GetWorkflowRunsInput {
project_id: ID!
workflow_run_ids: [ID]
pagination: Pagination
sort: SortInput
sort: WorkflowRunSortInput
filter: WorkflowRunFilterInput
}
@ -4667,6 +4652,51 @@ type GetWorkflowsOutput {
total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]!
}
input WorkflowFilterInput {
workflow_name: String
cluster_name: String
}
input ListWorkflowsInput {
project_id: ID!
workflow_ids: [ID]
pagination: Pagination
sort: WorkflowSortInput
filter: WorkflowFilterInput
}
enum WorkflowSortingField {
Name
}
input WorkflowSortInput {
field: WorkflowSortingField!
descending: Boolean
}
type Workflow {
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type ListWorkflowsOutput {
total_no_of_workflows: Int!
workflows: [Workflow]!
}
`, BuiltIn: false},
}
var parsedSchema = gqlparser.MustLoadSchema(sources...)
@ -5524,22 +5554,14 @@ func (ec *executionContext) field_Query_ListManifestTemplate_args(ctx context.Co
func (ec *executionContext) field_Query_ListWorkflow_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 string
if tmp, ok := rawArgs["project_id"]; ok {
arg0, err = ec.unmarshalNString2string(ctx, tmp)
var arg0 model.ListWorkflowsInput
if tmp, ok := rawArgs["workflowInput"]; ok {
arg0, err = ec.unmarshalNListWorkflowsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsInput(ctx, tmp)
if err != nil {
return nil, err
}
}
args["project_id"] = arg0
var arg1 []*string
if tmp, ok := rawArgs["workflow_ids"]; ok {
arg1, err = ec.unmarshalOID2ᚕᚖstring(ctx, tmp)
if err != nil {
return nil, err
}
}
args["workflow_ids"] = arg1
args["workflowInput"] = arg0
return args, nil
}
@ -5657,20 +5679,6 @@ func (ec *executionContext) field_Query_getProject_args(ctx context.Context, raw
return args, nil
}
func (ec *executionContext) field_Query_getScheduledWorkflows_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 string
if tmp, ok := rawArgs["project_id"]; ok {
arg0, err = ec.unmarshalNString2string(ctx, tmp)
if err != nil {
return nil, err
}
}
args["project_id"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_getUser_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@ -8811,6 +8819,74 @@ func (ec *executionContext) _Link_Url(ctx context.Context, field graphql.Collect
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _ListWorkflowsOutput_total_no_of_workflows(ctx context.Context, field graphql.CollectedField, obj *model.ListWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "ListWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.TotalNoOfWorkflows, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _ListWorkflowsOutput_workflows(ctx context.Context, field graphql.CollectedField, obj *model.ListWorkflowsOutput) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "ListWorkflowsOutput",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Workflows, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.Workflow)
fc.Result = res
return ec.marshalNWorkflow2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflow(ctx, field.Selections, res)
}
func (ec *executionContext) _Maintainer_Name(ctx context.Context, field graphql.CollectedField, obj *model.Maintainer) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@ -13833,67 +13909,6 @@ func (ec *executionContext) _Query_users(ctx context.Context, field graphql.Coll
return ec.marshalNUser2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐUserᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_getScheduledWorkflows(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_getScheduledWorkflows_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
directive0 := func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().GetScheduledWorkflows(rctx, args["project_id"].(string))
}
directive1 := func(ctx context.Context) (interface{}, error) {
if ec.directives.Authorized == nil {
return nil, errors.New("directive authorized is not implemented")
}
return ec.directives.Authorized(ctx, nil, directive0)
}
tmp, err := directive1(rctx)
if err != nil {
return nil, err
}
if tmp == nil {
return nil, nil
}
if data, ok := tmp.([]*model.ScheduledWorkflows); ok {
return data, nil
}
return nil, fmt.Errorf(`unexpected type %T from directive, should be []*github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.ScheduledWorkflows`, tmp)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]*model.ScheduledWorkflows)
fc.Result = res
return ec.marshalNScheduledWorkflows2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@ -13919,7 +13934,7 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
directive0 := func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().ListWorkflow(rctx, args["project_id"].(string), args["workflow_ids"].([]*string))
return ec.resolvers.Query().ListWorkflow(rctx, args["workflowInput"].(model.ListWorkflowsInput))
}
directive1 := func(ctx context.Context) (interface{}, error) {
if ec.directives.Authorized == nil {
@ -13935,10 +13950,10 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
if tmp == nil {
return nil, nil
}
if data, ok := tmp.([]*model.Workflow); ok {
if data, ok := tmp.(*model.ListWorkflowsOutput); ok {
return data, nil
}
return nil, fmt.Errorf(`unexpected type %T from directive, should be []*github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.Workflow`, tmp)
return nil, fmt.Errorf(`unexpected type %T from directive, should be *github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model.ListWorkflowsOutput`, tmp)
})
if err != nil {
ec.Error(ctx, err)
@ -13950,9 +13965,9 @@ func (ec *executionContext) _Query_ListWorkflow(ctx context.Context, field graph
}
return graphql.Null
}
res := resTmp.([]*model.Workflow)
res := resTmp.(*model.ListWorkflowsOutput)
fc.Result = res
return ec.marshalNWorkflow2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflow(ctx, field.Selections, res)
return ec.marshalNListWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_getCharts(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
@ -16642,40 +16657,6 @@ func (ec *executionContext) _User_removed_at(ctx context.Context, field graphql.
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Workflow_workflow_type(ctx context.Context, field graphql.CollectedField, obj *model.Workflow) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Workflow",
Field: field,
Args: nil,
IsMethod: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.WorkflowType, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Workflow_workflow_id(ctx context.Context, field graphql.CollectedField, obj *model.Workflow) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
@ -21484,7 +21465,7 @@ func (ec *executionContext) unmarshalInputGetWorkflowRunsInput(ctx context.Conte
}
case "sort":
var err error
it.Sort, err = ec.unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v)
it.Sort, err = ec.unmarshalOWorkflowRunSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx, v)
if err != nil {
return it, err
}
@ -21650,6 +21631,48 @@ func (ec *executionContext) unmarshalInputKubeObjectRequest(ctx context.Context,
return it, nil
}
func (ec *executionContext) unmarshalInputListWorkflowsInput(ctx context.Context, obj interface{}) (model.ListWorkflowsInput, error) {
var it model.ListWorkflowsInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "project_id":
var err error
it.ProjectID, err = ec.unmarshalNID2string(ctx, v)
if err != nil {
return it, err
}
case "workflow_ids":
var err error
it.WorkflowIds, err = ec.unmarshalOID2ᚕᚖstring(ctx, v)
if err != nil {
return it, err
}
case "pagination":
var err error
it.Pagination, err = ec.unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐPagination(ctx, v)
if err != nil {
return it, err
}
case "sort":
var err error
it.Sort, err = ec.unmarshalOWorkflowSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx, v)
if err != nil {
return it, err
}
case "filter":
var err error
it.Filter, err = ec.unmarshalOWorkflowFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
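For reference, the generated unmarshaler above walks a plain map keyed by the schema field names. A minimal sketch of a workflowInput variables object it would accept; all values, and the page/limit key names inside pagination, are illustrative assumptions rather than anything taken from this commit:
// Illustrative only: the decoded GraphQL variables handed to
// unmarshalInputListWorkflowsInput (all values are hypothetical).
var workflowInputVars = map[string]interface{}{
	"project_id":   "example-project-id",
	"workflow_ids": []interface{}{"wf-1", "wf-2"},                  // optional
	"pagination":   map[string]interface{}{"page": 0, "limit": 10}, // key names assumed
	"sort":         map[string]interface{}{"field": "Name", "descending": false},
	"filter":       map[string]interface{}{"workflow_name": "podtato-head", "cluster_name": "Self-Cluster"},
}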
func (ec *executionContext) unmarshalInputMemberInput(ctx context.Context, obj interface{}) (model.MemberInput, error) {
var it model.MemberInput
var asMap = obj.(map[string]interface{})
@ -21812,30 +21835,6 @@ func (ec *executionContext) unmarshalInputPodLogRequest(ctx context.Context, obj
return it, nil
}
func (ec *executionContext) unmarshalInputSortInput(ctx context.Context, obj interface{}) (model.SortInput, error) {
var it model.SortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputTemplateInput(ctx context.Context, obj interface{}) (model.TemplateInput, error) {
var it model.TemplateInput
var asMap = obj.(map[string]interface{})
@ -22016,6 +22015,30 @@ func (ec *executionContext) unmarshalInputWeightagesInput(ctx context.Context, o
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowFilterInput(ctx context.Context, obj interface{}) (model.WorkflowFilterInput, error) {
var it model.WorkflowFilterInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "workflow_name":
var err error
it.WorkflowName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
case "cluster_name":
var err error
it.ClusterName, err = ec.unmarshalOString2ᚖstring(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowRunFilterInput(ctx context.Context, obj interface{}) (model.WorkflowRunFilterInput, error) {
var it model.WorkflowRunFilterInput
var asMap = obj.(map[string]interface{})
@ -22106,6 +22129,54 @@ func (ec *executionContext) unmarshalInputWorkflowRunInput(ctx context.Context,
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowRunSortInput(ctx context.Context, obj interface{}) (model.WorkflowRunSortInput, error) {
var it model.WorkflowRunSortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowRunSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputWorkflowSortInput(ctx context.Context, obj interface{}) (model.WorkflowSortInput, error) {
var it model.WorkflowSortInput
var asMap = obj.(map[string]interface{})
for k, v := range asMap {
switch k {
case "field":
var err error
it.Field, err = ec.unmarshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx, v)
if err != nil {
return it, err
}
case "descending":
var err error
it.Descending, err = ec.unmarshalOBoolean2ᚖbool(ctx, v)
if err != nil {
return it, err
}
}
}
return it, nil
}
func (ec *executionContext) unmarshalInputcreateDBInput(ctx context.Context, obj interface{}) (model.CreateDBInput, error) {
var it model.CreateDBInput
var asMap = obj.(map[string]interface{})
@ -23341,6 +23412,38 @@ func (ec *executionContext) _Link(ctx context.Context, sel ast.SelectionSet, obj
return out
}
var listWorkflowsOutputImplementors = []string{"ListWorkflowsOutput"}
func (ec *executionContext) _ListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, obj *model.ListWorkflowsOutput) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, listWorkflowsOutputImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("ListWorkflowsOutput")
case "total_no_of_workflows":
out.Values[i] = ec._ListWorkflowsOutput_total_no_of_workflows(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "workflows":
out.Values[i] = ec._ListWorkflowsOutput_workflows(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var maintainerImplementors = []string{"Maintainer"}
func (ec *executionContext) _Maintainer(ctx context.Context, sel ast.SelectionSet, obj *model.Maintainer) graphql.Marshaler {
@ -24190,20 +24293,6 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
}
return res
})
case "getScheduledWorkflows":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._Query_getScheduledWorkflows(ctx, field)
if res == graphql.Null {
atomic.AddUint32(&invalids, 1)
}
return res
})
case "ListWorkflow":
field := field
out.Concurrently(i, func() (res graphql.Marshaler) {
@ -24762,11 +24851,6 @@ func (ec *executionContext) _Workflow(ctx context.Context, sel ast.SelectionSet,
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("Workflow")
case "workflow_type":
out.Values[i] = ec._Workflow_workflow_type(ctx, field, obj)
if out.Values[i] == graphql.Null {
invalids++
}
case "workflow_id":
out.Values[i] = ec._Workflow_workflow_id(ctx, field, obj)
if out.Values[i] == graphql.Null {
@ -26305,6 +26389,24 @@ func (ec *executionContext) marshalNLink2ᚖgithubᚗcomᚋlitmuschaosᚋlitmus
return ec._Link(ctx, sel, v)
}
func (ec *executionContext) unmarshalNListWorkflowsInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsInput(ctx context.Context, v interface{}) (model.ListWorkflowsInput, error) {
return ec.unmarshalInputListWorkflowsInput(ctx, v)
}
func (ec *executionContext) marshalNListWorkflowsOutput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v model.ListWorkflowsOutput) graphql.Marshaler {
return ec._ListWorkflowsOutput(ctx, sel, &v)
}
func (ec *executionContext) marshalNListWorkflowsOutput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐListWorkflowsOutput(ctx context.Context, sel ast.SelectionSet, v *model.ListWorkflowsOutput) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._ListWorkflowsOutput(ctx, sel, v)
}
func (ec *executionContext) marshalNMaintainer2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐMaintainer(ctx context.Context, sel ast.SelectionSet, v model.Maintainer) graphql.Marshaler {
return ec._Maintainer(ctx, sel, &v)
}
@ -26688,43 +26790,6 @@ func (ec *executionContext) marshalNSSHKey2ᚖgithubᚗcomᚋlitmuschaosᚋlitmu
return ec._SSHKey(ctx, sel, v)
}
func (ec *executionContext) marshalNScheduledWorkflows2ᚕᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v []*model.ScheduledWorkflows) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalOScheduledWorkflows2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
return ret
}
func (ec *executionContext) marshalNSpec2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSpec(ctx context.Context, sel ast.SelectionSet, v model.Spec) graphql.Marshaler {
return ec._Spec(ctx, sel, &v)
}
@ -26974,6 +27039,15 @@ func (ec *executionContext) marshalNWorkflowRunSortingField2githubᚗcomᚋlitmu
return v
}
func (ec *executionContext) unmarshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx context.Context, v interface{}) (model.WorkflowSortingField, error) {
var res model.WorkflowSortingField
return res, res.UnmarshalGQL(v)
}
func (ec *executionContext) marshalNWorkflowSortingField2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortingField(ctx context.Context, sel ast.SelectionSet, v model.WorkflowSortingField) graphql.Marshaler {
return v
}
func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler {
return ec.___Directive(ctx, sel, &v)
}
@ -27708,29 +27782,6 @@ func (ec *executionContext) unmarshalOPagination2ᚖgithubᚗcomᚋlitmuschaos
return &res, err
}
func (ec *executionContext) marshalOScheduledWorkflows2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v model.ScheduledWorkflows) graphql.Marshaler {
return ec._ScheduledWorkflows(ctx, sel, &v)
}
func (ec *executionContext) marshalOScheduledWorkflows2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐScheduledWorkflows(ctx context.Context, sel ast.SelectionSet, v *model.ScheduledWorkflows) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec._ScheduledWorkflows(ctx, sel, v)
}
func (ec *executionContext) unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (model.SortInput, error) {
return ec.unmarshalInputSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx context.Context, v interface{}) (*model.SortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) {
return graphql.UnmarshalString(v)
}
@ -27809,6 +27860,18 @@ func (ec *executionContext) marshalOWorkflow2ᚖgithubᚗcomᚋlitmuschaosᚋlit
return ec._Workflow(ctx, sel, v)
}
func (ec *executionContext) unmarshalOWorkflowFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx context.Context, v interface{}) (model.WorkflowFilterInput, error) {
return ec.unmarshalInputWorkflowFilterInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowFilterInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx context.Context, v interface{}) (*model.WorkflowFilterInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowFilterInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowFilterInput(ctx, v)
return &res, err
}
func (ec *executionContext) marshalOWorkflowRun2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRun(ctx context.Context, sel ast.SelectionSet, v model.WorkflowRun) graphql.Marshaler {
return ec._WorkflowRun(ctx, sel, &v)
}
@ -27832,6 +27895,18 @@ func (ec *executionContext) unmarshalOWorkflowRunFilterInput2ᚖgithubᚗcomᚋl
return &res, err
}
func (ec *executionContext) unmarshalOWorkflowRunSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx context.Context, v interface{}) (model.WorkflowRunSortInput, error) {
return ec.unmarshalInputWorkflowRunSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowRunSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx context.Context, v interface{}) (*model.WorkflowRunSortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowRunSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) unmarshalOWorkflowRunStatus2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowRunStatus(ctx context.Context, v interface{}) (model.WorkflowRunStatus, error) {
var res model.WorkflowRunStatus
return res, res.UnmarshalGQL(v)
@ -27907,6 +27982,18 @@ func (ec *executionContext) marshalOWorkflowRuns2ᚖgithubᚗcomᚋlitmuschaos
return ec._WorkflowRuns(ctx, sel, v)
}
func (ec *executionContext) unmarshalOWorkflowSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx context.Context, v interface{}) (model.WorkflowSortInput, error) {
return ec.unmarshalInputWorkflowSortInput(ctx, v)
}
func (ec *executionContext) unmarshalOWorkflowSortInput2ᚖgithubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx context.Context, v interface{}) (*model.WorkflowSortInput, error) {
if v == nil {
return nil, nil
}
res, err := ec.unmarshalOWorkflowSortInput2githubᚗcomᚋlitmuschaosᚋlitmusᚋlitmusᚑportalᚋgraphqlᚑserverᚋgraphᚋmodelᚐWorkflowSortInput(ctx, v)
return &res, err
}
func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler {
if v == nil {
return graphql.Null

View File

@ -220,7 +220,7 @@ type GetWorkflowRunsInput struct {
ProjectID string `json:"project_id"`
WorkflowRunIds []*string `json:"workflow_run_ids"`
Pagination *Pagination `json:"pagination"`
Sort *SortInput `json:"sort"`
Sort *WorkflowRunSortInput `json:"sort"`
Filter *WorkflowRunFilterInput `json:"filter"`
}
@ -289,6 +289,19 @@ type Link struct {
URL string `json:"Url"`
}
type ListWorkflowsInput struct {
ProjectID string `json:"project_id"`
WorkflowIds []*string `json:"workflow_ids"`
Pagination *Pagination `json:"pagination"`
Sort *WorkflowSortInput `json:"sort"`
Filter *WorkflowFilterInput `json:"filter"`
}
type ListWorkflowsOutput struct {
TotalNoOfWorkflows int `json:"total_no_of_workflows"`
Workflows []*Workflow `json:"workflows"`
}
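A minimal sketch of how a caller might populate the new input type; the literal values are illustrative, and the Pagination field names (Page, Limit) are taken from their use in the workflow handler further down:
// Illustrative only: building a ListWorkflowsInput with paging, sorting and filtering.
descending := true
workflowName := "node-cpu-hog" // hypothetical filter value
input := model.ListWorkflowsInput{
	ProjectID:  "example-project-id", // hypothetical
	Pagination: &model.Pagination{Page: 0, Limit: 15},
	Sort:       &model.WorkflowSortInput{Field: model.WorkflowSortingFieldName, Descending: &descending},
	Filter:     &model.WorkflowFilterInput{WorkflowName: &workflowName},
}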
type Maintainer struct {
Name string `json:"Name"`
Email string `json:"Email"`
@ -438,11 +451,6 @@ type ScheduledWorkflows struct {
IsRemoved bool `json:"isRemoved"`
}
type SortInput struct {
Field WorkflowRunSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type Spec struct {
DisplayName string `json:"DisplayName"`
CategoryDescription string `json:"CategoryDescription"`
@ -508,7 +516,6 @@ type WeightagesInput struct {
}
type Workflow struct {
WorkflowType string `json:"workflow_type"`
WorkflowID string `json:"workflow_id"`
WorkflowManifest string `json:"workflow_manifest"`
CronSyntax string `json:"cronSyntax"`
@ -526,6 +533,11 @@ type Workflow struct {
WorkflowRuns []*WorkflowRuns `json:"workflow_runs"`
}
type WorkflowFilterInput struct {
WorkflowName *string `json:"workflow_name"`
ClusterName *string `json:"cluster_name"`
}
type WorkflowRun struct {
WorkflowRunID string `json:"workflow_run_id"`
WorkflowID string `json:"workflow_id"`
@ -560,12 +572,22 @@ type WorkflowRunInput struct {
IsRemoved *bool `json:"isRemoved"`
}
type WorkflowRunSortInput struct {
Field WorkflowRunSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type WorkflowRuns struct {
ExecutionData string `json:"execution_data"`
WorkflowRunID string `json:"workflow_run_id"`
LastUpdated string `json:"last_updated"`
}
type WorkflowSortInput struct {
Field WorkflowSortingField `json:"field"`
Descending *bool `json:"descending"`
}
type AnnotationsPromResponse struct {
Queryid string `json:"queryid"`
Legends []*string `json:"legends"`
@ -955,3 +977,42 @@ func (e *WorkflowRunStatus) UnmarshalGQL(v interface{}) error {
func (e WorkflowRunStatus) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type WorkflowSortingField string
const (
WorkflowSortingFieldName WorkflowSortingField = "Name"
)
var AllWorkflowSortingField = []WorkflowSortingField{
WorkflowSortingFieldName,
}
func (e WorkflowSortingField) IsValid() bool {
switch e {
case WorkflowSortingFieldName:
return true
}
return false
}
func (e WorkflowSortingField) String() string {
return string(e)
}
func (e *WorkflowSortingField) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = WorkflowSortingField(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid WorkflowSortingField", str)
}
return nil
}
func (e WorkflowSortingField) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
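As a quick illustration of the enum contract above, from a caller outside the package (the rejected value is hypothetical):
var f model.WorkflowSortingField
_ = f.UnmarshalGQL("Name")        // valid: f becomes WorkflowSortingFieldName
err := f.UnmarshalGQL("Duration") // error: "Duration is not a valid WorkflowSortingField"
_ = err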

View File

@ -152,7 +152,7 @@ input PodLogRequest {
}
type ScheduledWorkflows {
workflow_type:String!
workflow_type: String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
@ -169,25 +169,6 @@ type ScheduledWorkflows {
isRemoved: Boolean!
}
type Workflow {
workflow_type:String!
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type WorkflowRuns {
execution_data: String!
workflow_run_id: ID!
@ -287,10 +268,8 @@ type Query {
users: [User!]! @authorized
# [Deprecated soon]
getScheduledWorkflows(project_id: String!): [ScheduledWorkflows]! @authorized
ListWorkflow(project_id: String!, workflow_ids: [ID]): [Workflow]! @authorized
ListWorkflow(workflowInput: ListWorkflowsInput!): ListWorkflowsOutput!
@authorized
getCharts(HubName: String!, projectID: String!): [Chart!]! @authorized
@ -308,7 +287,8 @@ type Query {
GetPromQuery(query: promInput): promResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse! @authorized
GetPromLabelNamesAndValues(series: promSeriesInput): promSeriesResponse!
@authorized
GetPromSeriesList(ds_details: dsDetails): promSeriesListResponse! @authorized
@ -344,13 +324,16 @@ type Mutation {
## Workflow APIs
# It is used to create chaosworkflow
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse! @authorized
createChaosWorkFlow(input: ChaosWorkFlowInput!): ChaosWorkFlowResponse!
@authorized
reRunChaosWorkFlow(workflowID: String!): String! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean! @authorized
deleteChaosWorkflow(workflowid: String, workflow_run_id: String): Boolean!
@authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean! @authorized
syncWorkflow(workflowid: String!, workflow_run_id: String!): Boolean!
@authorized
#Used for sending invitation
sendInvitation(member: MemberInput!): Member @authorized
@ -368,7 +351,8 @@ type Mutation {
leaveProject(member: MemberInput!): String! @authorized
#Used to update project name
updateProjectName(projectID: String!, projectName: String!): String! @authorized
updateProjectName(projectID: String!, projectName: String!): String!
@authorized
#It is used to confirm the subscriber registration
clusterConfirm(identity: ClusterIdentity!): ClusterConfirmResponse!
@ -388,7 +372,8 @@ type Mutation {
syncHub(id: ID!): [MyHubStatus!]! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse! @authorized
updateChaosWorkflow(input: ChaosWorkFlowInput): ChaosWorkFlowResponse!
@authorized
deleteClusterReg(cluster_id: String!): String! @authorized
@ -423,7 +408,8 @@ type Mutation {
deleteDataSource(input: deleteDSInput!): Boolean! @authorized
# Manifest Template
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate! @authorized
createManifestTemplate(templateInput: TemplateInput): ManifestTemplate!
@authorized
deleteManifestTemplate(template_id: String!): Boolean! @authorized
@ -439,7 +425,8 @@ type Mutation {
imageRegistryInfo: imageRegistryInput!
): ImageRegistryResponse! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String! @authorized
deleteImageRegistry(image_registry_id: String!, project_id: String!): String!
@authorized
}
type Subscription {
@ -453,5 +440,6 @@ type Subscription {
#It is used to listen cluster operation request from the graphql server
clusterConnect(clusterInfo: ClusterIdentity!): ClusterAction!
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse! @authorized
getKubeObject(kubeObjectRequest: KubeObjectRequest!): KubeObjectResponse!
@authorized
}

View File

@ -337,20 +337,12 @@ func (r *queryResolver) Users(ctx context.Context) ([]*model.User, error) {
return usermanagement.GetUsers(ctx)
}
func (r *queryResolver) GetScheduledWorkflows(ctx context.Context, projectID string) ([]*model.ScheduledWorkflows, error) {
err := validate.ValidateRole(ctx, projectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
func (r *queryResolver) ListWorkflow(ctx context.Context, workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error) {
err := validate.ValidateRole(ctx, workflowInput.ProjectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
if err != nil {
return nil, err
}
return wfHandler.QueryWorkflows(projectID)
}
func (r *queryResolver) ListWorkflow(ctx context.Context, projectID string, workflowIds []*string) ([]*model.Workflow, error) {
err := validate.ValidateRole(ctx, projectID, []model.MemberRole{model.MemberRoleOwner, model.MemberRoleEditor, model.MemberRoleViewer}, usermanagement.AcceptedInvitation)
if err != nil {
return nil, err
}
return wfHandler.QueryListWorkflow(projectID, workflowIds)
return wfHandler.QueryListWorkflow(workflowInput)
}
func (r *queryResolver) GetCharts(ctx context.Context, hubName string, projectID string) ([]*model.Chart, error) {

View File

@ -27,7 +27,7 @@ enum WorkflowRunSortingField {
Time
}
input SortInput {
input WorkflowRunSortInput {
field: WorkflowRunSortingField!
descending: Boolean
}
@ -36,7 +36,7 @@ input GetWorkflowRunsInput {
project_id: ID!
workflow_run_ids: [ID]
pagination: Pagination
sort: SortInput
sort: WorkflowRunSortInput
filter: WorkflowRunFilterInput
}
@ -61,3 +61,48 @@ type GetWorkflowsOutput {
total_no_of_workflow_runs: Int!
workflow_runs: [WorkflowRun]!
}
input WorkflowFilterInput {
workflow_name: String
cluster_name: String
}
input ListWorkflowsInput {
project_id: ID!
workflow_ids: [ID]
pagination: Pagination
sort: WorkflowSortInput
filter: WorkflowFilterInput
}
enum WorkflowSortingField {
Name
}
input WorkflowSortInput {
field: WorkflowSortingField!
descending: Boolean
}
type Workflow {
workflow_id: String!
workflow_manifest: String!
cronSyntax: String!
cluster_name: String!
workflow_name: String!
workflow_description: String!
weightages: [weightages!]!
isCustomWorkflow: Boolean!
updated_at: String!
created_at: String!
project_id: ID!
cluster_id: ID!
cluster_type: String!
isRemoved: Boolean!
workflow_runs: [WorkflowRuns]
}
type ListWorkflowsOutput {
total_no_of_workflows: Int!
workflows: [Workflow]!
}

View File

@ -10,13 +10,12 @@ import (
"strings"
"time"
"go.mongodb.org/mongo-driver/mongo"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
"github.com/jinzhu/copier"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"github.com/google/uuid"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
@ -144,12 +143,12 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
var pipeline mongo.Pipeline
// Match with projectID
matchStage := bson.D{
matchProjectIdStage := bson.D{
{"$match", bson.D{
{"project_id", input.ProjectID},
}},
}
pipeline = append(pipeline, matchStage)
pipeline = append(pipeline, matchProjectIdStage)
includeAllFromWorkflow := bson.D{
{"workflow_id", 1},
@ -169,7 +168,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
}
// Filter the available workflow runs where isRemoved is false
matchWfRemovedStage := bson.D{
matchWfRunIsRemovedStage := bson.D{
{"$project", append(includeAllFromWorkflow,
bson.E{Key: "workflow_runs", Value: bson.D{
{"$filter", bson.D{
@ -182,7 +181,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
}},
)},
}
pipeline = append(pipeline, matchWfRemovedStage)
pipeline = append(pipeline, matchWfRunIsRemovedStage)
// Match the workflow run IDs from the input array
if len(input.WorkflowRunIds) != 0 {
@ -358,7 +357,7 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
var workflows []dbSchemaWorkflow.AggregatedWorkflowRuns
if err = workflowsCursor.All(context.Background(), &workflows); err != nil {
if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 {
fmt.Println(err)
return &model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: 0,
@ -388,90 +387,163 @@ func QueryWorkflowRuns(input model.GetWorkflowRunsInput) (*model.GetWorkflowsOut
result = append(result, &newWorkflowRun)
}
totalFilteredWorkflowRuns := 0
totalFilteredWorkflowRunsCounter := 0
if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflowRuns) > 0 {
totalFilteredWorkflowRuns = workflows[0].TotalFilteredWorkflowRuns[0].Count
totalFilteredWorkflowRunsCounter = workflows[0].TotalFilteredWorkflowRuns[0].Count
}
output := model.GetWorkflowsOutput{
TotalNoOfWorkflowRuns: totalFilteredWorkflowRuns,
TotalNoOfWorkflowRuns: totalFilteredWorkflowRunsCounter,
WorkflowRuns: result,
}
return &output, nil
}
// Deprecated
func QueryWorkflows(project_id string) ([]*model.ScheduledWorkflows, error) {
chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(bson.D{{"project_id", project_id}})
if err != nil {
return nil, err
}
result := []*model.ScheduledWorkflows{}
for _, workflow := range chaosWorkflows {
cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
if err != nil {
return nil, err
}
if workflow.IsRemoved == false {
var Weightages []*model.Weightages
copier.Copy(&Weightages, &workflow.Weightages)
newChaosWorkflows := model.ScheduledWorkflows{
WorkflowType: string(workflow.WorkflowType),
WorkflowID: workflow.WorkflowID,
WorkflowManifest: workflow.WorkflowManifest,
WorkflowName: workflow.WorkflowName,
CronSyntax: workflow.CronSyntax,
WorkflowDescription: workflow.WorkflowDescription,
Weightages: Weightages,
IsCustomWorkflow: workflow.IsCustomWorkflow,
UpdatedAt: workflow.UpdatedAt,
CreatedAt: workflow.CreatedAt,
ProjectID: workflow.ProjectID,
IsRemoved: workflow.IsRemoved,
ClusterName: cluster.ClusterName,
ClusterID: cluster.ClusterID,
ClusterType: cluster.ClusterType,
}
result = append(result, &newChaosWorkflows)
}
}
return result, nil
}
// QueryListWorkflow returns the workflows of the given project, after applying the requested filter, sort, and pagination options
func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workflow, error) {
var query bson.D
if len(workflowIds) != 0 {
query = bson.D{
{"project_id", project_id},
{"workflow_id", bson.M{"$in": workflowIds}},
func QueryListWorkflow(workflowInput model.ListWorkflowsInput) (*model.ListWorkflowsOutput, error) {
var pipeline mongo.Pipeline
// Match with projectID
matchProjectIdStage := bson.D{
{"$match", bson.D{
{"project_id", workflowInput.ProjectID},
}},
}
pipeline = append(pipeline, matchProjectIdStage)
// Match the workflow IDs from the input array
if len(workflowInput.WorkflowIds) != 0 {
matchWfIdStage := bson.D{
{"$match", bson.D{
{"workflow_id", bson.D{
{"$in", workflowInput.WorkflowIds},
}},
}},
}
} else {
query = bson.D{
{"project_id", project_id},
pipeline = append(pipeline, matchWfIdStage)
}
// Filtering out the workflows that are deleted/removed
matchWfIsRemovedStage := bson.D{
{"$match", bson.D{
{"isRemoved", bson.D{
{"$eq", false},
}},
}},
}
pipeline = append(pipeline, matchWfIsRemovedStage)
// Filtering based on multiple parameters
if workflowInput.Filter != nil {
// Filtering based on workflow name
if workflowInput.Filter.WorkflowName != nil && *workflowInput.Filter.WorkflowName != "" {
matchWfNameStage := bson.D{
{"$match", bson.D{
{"workflow_name", bson.D{
{"$regex", workflowInput.Filter.WorkflowName},
}},
}},
}
pipeline = append(pipeline, matchWfNameStage)
}
// Filtering based on cluster name
if workflowInput.Filter.ClusterName != nil && *workflowInput.Filter.ClusterName != "All" && *workflowInput.Filter.ClusterName != "" {
matchClusterStage := bson.D{
{"$match", bson.D{
{"cluster_name", workflowInput.Filter.ClusterName},
}},
}
pipeline = append(pipeline, matchClusterStage)
}
}
chaosWorkflows, err := dbOperationsWorkflow.GetWorkflows(query)
var sortStage bson.D
switch {
case workflowInput.Sort != nil && workflowInput.Sort.Field == model.WorkflowSortingFieldName:
// Sorting based on workflow name
if workflowInput.Sort.Descending != nil && *workflowInput.Sort.Descending {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_name", -1},
}},
}
} else {
sortStage = bson.D{
{"$sort", bson.D{
{"workflow_name", 1},
}},
}
}
default:
// Default sorting: by updated_at (last updated time) in descending order
sortStage = bson.D{
{"$sort", bson.D{
{"updated_at", -1},
}},
}
}
// Pagination
paginatedWorkflows := bson.A{
sortStage,
}
if workflowInput.Pagination != nil {
paginationSkipStage := bson.D{
{"$skip", workflowInput.Pagination.Page * workflowInput.Pagination.Limit},
}
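// e.g. Page=2 with Limit=10 skips the first 20 documents; the Page value is assumed to be zero-indexed.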
paginationLimitStage := bson.D{
{"$limit", workflowInput.Pagination.Limit},
}
paginatedWorkflows = append(paginatedWorkflows, paginationSkipStage, paginationLimitStage)
}
// Add a $facet stage with two sub-pipelines: one counts the filtered workflows, the other paginates the results
facetStage := bson.D{
{"$facet", bson.D{
{"total_filtered_workflows", bson.A{
bson.D{{"$count", "count"}},
}},
{"scheduled_workflows", paginatedWorkflows},
}},
}
pipeline = append(pipeline, facetStage)
// Call aggregation on pipeline
workflowsCursor, err := dbOperationsWorkflow.GetAggregateWorkflows(pipeline)
if err != nil {
return nil, err
}
var result []*model.Workflow
for _, workflow := range chaosWorkflows {
var workflows []dbSchemaWorkflow.AggregatedWorkflows
if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 {
return &model.ListWorkflowsOutput{
TotalNoOfWorkflows: 0,
Workflows: result,
}, nil
}
for _, workflow := range workflows[0].ScheduledWorkflows {
cluster, err := dbOperationsCluster.GetCluster(workflow.ClusterID)
if err != nil {
return nil, err
}
var Weightages []*model.Weightages
copier.Copy(&Weightages, &workflow.Weightages)
var WorkflowRuns []*model.WorkflowRuns
copier.Copy(&WorkflowRuns, &workflow.WorkflowRuns)
newChaosWorkflows := model.Workflow{
WorkflowType: string(workflow.WorkflowType),
WorkflowID: workflow.WorkflowID,
WorkflowManifest: workflow.WorkflowManifest,
WorkflowName: workflow.WorkflowName,
@ -490,7 +562,17 @@ func QueryListWorkflow(project_id string, workflowIds []*string) ([]*model.Workf
}
result = append(result, &newChaosWorkflows)
}
return result, nil
totalFilteredWorkflowsCounter := 0
if len(workflows) > 0 && len(workflows[0].TotalFilteredWorkflows) > 0 {
totalFilteredWorkflowsCounter = workflows[0].TotalFilteredWorkflows[0].Count
}
output := model.ListWorkflowsOutput{
TotalNoOfWorkflows: totalFilteredWorkflowsCounter,
Workflows: result,
}
return &output, nil
}
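A usage sketch of the new handler; the project ID and page size are hypothetical, and the total-pages arithmetic is only meant to show how TotalNoOfWorkflows relates to the paginated Workflows slice:
limit := 10
out, err := QueryListWorkflow(model.ListWorkflowsInput{
	ProjectID:  "example-project-id", // hypothetical
	Pagination: &model.Pagination{Page: 0, Limit: limit},
})
if err == nil {
	// TotalNoOfWorkflows counts every workflow that matched the filters,
	// while Workflows holds only the requested page.
	totalPages := (out.TotalNoOfWorkflows + limit - 1) / limit
	fmt.Println(totalPages, len(out.Workflows))
}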
// WorkFlowRunHandler Updates or Inserts a new Workflow Run into the DB

View File

@ -47,11 +47,11 @@ type ChaosWorkflowRun struct {
}
type AggregatedWorkflowRuns struct {
TotalFilteredWorkflowRuns []TotalFilteredWorkflowRuns `bson:"total_filtered_workflow_runs"`
FlattenedWorkflowRuns []FlattenedWorkflowRun `bson:"flattened_workflow_runs"`
TotalFilteredWorkflowRuns []TotalFilteredData `bson:"total_filtered_workflow_runs"`
FlattenedWorkflowRuns []FlattenedWorkflowRun `bson:"flattened_workflow_runs"`
}
type TotalFilteredWorkflowRuns struct {
type TotalFilteredData struct {
Count int `bson:"count"`
}
@ -72,3 +72,8 @@ type FlattenedWorkflowRun struct {
WorkflowRuns ChaosWorkflowRun `bson:"workflow_runs"`
IsRemoved bool `bson:"isRemoved"`
}
type AggregatedWorkflows struct {
TotalFilteredWorkflows []TotalFilteredData `bson:"total_filtered_workflows"`
ScheduledWorkflows []ChaosWorkFlowInput `bson:"scheduled_workflows"`
}
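For orientation, the single document emitted by the $facet stage in QueryListWorkflow is what decodes into AggregatedWorkflows; a rough sketch of its shape, with illustrative values:
// Roughly the facet output that workflowsCursor.All decodes into []AggregatedWorkflows.
exampleFacetDoc := bson.M{
	"total_filtered_workflows": bson.A{bson.M{"count": 12}},                 // illustrative count
	"scheduled_workflows":      bson.A{ /* ChaosWorkFlowInput documents for the requested page */ },
}
_ = exampleFacetDoc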