(api-refactor): Refactored GQL server APIs and updated frontend dir (#3579)

* Updated GQL Schema naming convention and docs (#3434)

* schema fix for naming convention and doc

Signed-off-by: Soumya Ghosh Dastidar <gdsoumya@gmail.com>

* split the schema into multiple gql files

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* rearranged queries

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* added comments

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* Updated chaoshub code docs and removed unused models

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* userClusterReg -> registerCluster

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* added cluster mutation API changes

Signed-off-by: arkajyotiMukherjee <arko@chaosnative.com>

* Interfaces + queries updated, pages WIP

Signed-off-by: Vansh Bhatia <vansh@chaosnative.com>

* frontend queries refactored

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* more mutation changes

Signed-off-by: Arkajyoti Mukherjee <arkajyoti31@gmail.com>

* more mutation changes

Signed-off-by: Arkajyoti Mukherjee <arkajyoti31@gmail.com>

* mutations refactored

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Fixed handler name

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* All errors fixed, frontend compiling, LogSwitcher fix WIP

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Log Switcher fixed

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* linter fixes

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Updated ChaosHub mutations

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Changed query names and parameters according to convention in backend

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* Changed query names and parameters in FE

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* chaosHub fixes

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Updated subscriber schema and minor refactoring

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* frontend fixes

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Renamed get to list in queries

Signed-off-by: SarthakJain26 <sarthak@chaosnative.com>

* Fixed enums issue

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Fixed workflow related schemas in FE and minor change in ENUMs

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Added FE integration for workflow creation

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Updated analytics queries in FE

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* Updated chaoshub queries in FE

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* Updated workflows, logs and template queries and mutations in FE

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Deepscan fix

Signed-off-by: Vansh Bhatia <vansh.bhatia@harness.io>

* Fixed analytics, myhub, usage and project related queries

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* fixed workflow stats page

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Updated payload data type in subscriber

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* minor fix

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Fixed analytics and delete workflow issue

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Fixed usage stats schema in frontend and backend

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* Added fix for username fetching issue in usage table

Signed-off-by: Saranya-jena <saranya.jena@harness.io>

* Added query for agent details

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

* Updated go mod in auth svr

Signed-off-by: Amit Kumar Das <amit@chaosnative.com>

Co-authored-by: Soumya Ghosh Dastidar <44349253+gdsoumya@users.noreply.github.com>
Co-authored-by: arkajyotiMukherjee <arko@chaosnative.com>
Co-authored-by: Vansh Bhatia <vansh@chaosnative.com>
Co-authored-by: Vansh Bhatia <vansh.bhatia@harness.io>
Co-authored-by: Arkajyoti Mukherjee <arkajyoti31@gmail.com>
Co-authored-by: Saranya-jena <saranya.jena@harness.io>
Co-authored-by: SarthakJain26 <sarthak@chaosnative.com>
Authored by Amit Kumar Das on 2022-05-11 11:26:12 +05:30; committed by GitHub
commit 3f8021f09b (parent 4d71cea292)
194 changed files with 50910 additions and 19789 deletions
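A minimal before/after sketch of the naming-convention change, using the getCluster → listClusters query exactly as it appears in the frontend diffs below (snake_case arguments and fields with a "get" prefix become camelCase with a "list" prefix for list queries); only a few fields are shown for brevity:

import { gql } from '@apollo/client';

// Before the refactor: snake_case arguments/fields, "get" prefix
export const GET_CLUSTER_OLD = gql`
  query getClusters($project_id: String!, $cluster_type: String) {
    getCluster(project_id: $project_id, cluster_type: $cluster_type) {
      cluster_id
      cluster_name
      is_active
    }
  }
`;

// After the refactor: camelCase arguments/fields, "list" prefix
export const GET_CLUSTER = gql`
  query listClusters($projectID: String!, $clusterType: String) {
    listClusters(projectID: $projectID, clusterType: $clusterType) {
      clusterID
      clusterName
      isActive
    }
  }
`;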

View File

@ -12,7 +12,7 @@ require (
github.com/kelseyhightower/envconfig v1.4.0
github.com/sirupsen/logrus v1.4.2
go.mongodb.org/mongo-driver v1.5.3
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a
golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd
golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f
google.golang.org/grpc v1.32.0
google.golang.org/protobuf v1.27.1

View File

@ -254,8 +254,8 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a h1:kr2P4QFmQr29mSLA43kwrOcgcReGTfbE9N577tCTuBc=
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd h1:XcWmESyNjXJMLahc3mqVQJcgSTDxFxhETVlfk9uGc38=
golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -313,8 +313,8 @@ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -364,16 +364,19 @@ golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 h1:nxC68pudNYkKU6jWhgrqdreuFiOQWj1Fs7T3VrH4Pjw=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 h1:SrN+KX8Art/Sf4HNj6Zcz06G7VEz+7w9tdXTPOZ7+l4=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View File

@ -12,7 +12,7 @@ type Project struct {
}
type Owner struct {
UserId string `bson:"user_id"`
UserID string `bson:"user_id"`
Username string `bson:"username"`
}
type MemberStat struct {
@ -22,7 +22,7 @@ type MemberStat struct {
type ProjectStats struct {
Name string `bson:"name"`
ProjectId string `bson:"_id"`
ProjectID string `bson:"_id"`
Members *MemberStat `bson:"memberStat"`
}

View File

@ -72,7 +72,7 @@ func getChaosData(nodeStatus v1alpha13.NodeStatus, engineName, engineNS string,
return cd, nil
}
// util function, checks if event is a chaos-exp event, if so - extract the chaos data
//CheckChaosData util function, checks if event is a chaos-exp event, if so - extract the chaos data
func CheckChaosData(nodeStatus v1alpha13.NodeStatus, workflowNS string, chaosClient *v1alpha12.LitmuschaosV1alpha1Client) (string, *types.ChaosData, error) {
nodeType := string(nodeStatus.Type)
var cd *types.ChaosData = nil
@ -115,7 +115,7 @@ func getNameFromLog(log string) string {
return name[1]
}
// converts unix timestamp to string
//StrConvTime converts unix timestamp to string
func StrConvTime(time int64) string {
if time < 0 {
return ""
@ -147,9 +147,9 @@ func GetWorkflowObj(uid string) (*v1alpha1.Workflow, error) {
return nil, nil
}
// generate graphql mutation payload for events event
//GenerateWorkflowPayload generate graphql mutation payload for events event
func GenerateWorkflowPayload(cid, accessKey, version, completed string, wfEvent types.WorkflowEvent) ([]byte, error) {
clusterID := `{cluster_id: \"` + cid + `\", version: \"` + version + `\", access_key: \"` + accessKey + `\"}`
clusterID := `{clusterID: \"` + cid + `\", version: \"` + version + `\", accessKey: \"` + accessKey + `\"}`
for id, event := range wfEvent.Nodes {
event.Message = strings.Replace(event.Message, `"`, ``, -1)
@ -160,7 +160,7 @@ func GenerateWorkflowPayload(cid, accessKey, version, completed string, wfEvent
if err != nil {
return nil, err
}
mutation := `{ workflow_id: \"` + wfEvent.WorkflowID + `\", workflow_run_id: \"` + wfEvent.UID + `\", completed: ` + completed + `, workflow_name:\"` + wfEvent.Name + `\", cluster_id: ` + clusterID + `, executed_by:\"` + wfEvent.ExecutedBy + `\", execution_data:\"` + processed[1:len(processed)-1] + `\"}`
var payload = []byte(`{"query":"mutation { chaosWorkflowRun(workflowData:` + mutation + ` )}"}`)
mutation := `{ workflowID: \"` + wfEvent.WorkflowID + `\", workflowRunID: \"` + wfEvent.UID + `\", completed: ` + completed + `, workflowName:\"` + wfEvent.Name + `\", clusterID: ` + clusterID + `, executedBy:\"` + wfEvent.ExecutedBy + `\", executionData:\"` + processed[1:len(processed)-1] + `\"}`
var payload = []byte(`{"query":"mutation { chaosWorkflowRun(request:` + mutation + ` )}"}`)
return payload, nil
}

View File

@ -38,7 +38,7 @@ var (
ClusterID = os.Getenv("CLUSTER_ID")
)
// initializes the Argo Workflow event watcher
//WorkflowEventWatcher initializes the Argo Workflow event watcher
func WorkflowEventWatcher(stopCh chan struct{}, stream chan types.WorkflowEvent, clusterData map[string]string) {
startTime, err := strconv.Atoi(clusterData["START_TIME"])
if err != nil {
@ -48,17 +48,21 @@ func WorkflowEventWatcher(stopCh chan struct{}, stream chan types.WorkflowEvent,
if err != nil {
logrus.WithError(err).Fatal("Could not get kube config")
}
// ClientSet to create Informer
clientSet, err := versioned.NewForConfig(cfg)
if err != nil {
logrus.WithError(err).Fatal("Could not generate dynamic client for config")
}
// Create a factory object to watch workflows depending on default scope
f := externalversions.NewSharedInformerFactoryWithOptions(clientSet, resyncPeriod,
externalversions.WithTweakListOptions(func(list *v1.ListOptions) {
list.LabelSelector = fmt.Sprintf("cluster_id=%s,workflows.argoproj.io/controller-instanceid=%s", ClusterID, ClusterID)
}))
informer := f.Argoproj().V1alpha1().Workflows().Informer()
if AgentScope == "namespace" {
f = externalversions.NewSharedInformerFactoryWithOptions(clientSet, resyncPeriod, externalversions.WithNamespace(AgentNamespace),
externalversions.WithTweakListOptions(func(list *v1.ListOptions) {
@ -70,7 +74,7 @@ func WorkflowEventWatcher(stopCh chan struct{}, stream chan types.WorkflowEvent,
go startWatchWorkflow(stopCh, informer, stream, int64(startTime))
}
// handles the different events events - add, update and delete
//startWatchWorkflow handles the different events events - add, update and delete
func startWatchWorkflow(stopCh <-chan struct{}, s cache.SharedIndexInformer, stream chan types.WorkflowEvent, startTime int64) {
handlers := cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
@ -99,7 +103,7 @@ func startWatchWorkflow(stopCh <-chan struct{}, s cache.SharedIndexInformer, str
s.Run(stopCh)
}
// responsible for extracting the required data from the event and streaming
//WorkflowEventHandler responsible for extracting the required data from the event and streaming
func WorkflowEventHandler(workflowObj *v1alpha1.Workflow, eventType string, startTime int64) (types.WorkflowEvent, error) {
if workflowObj.Labels["workflow_id"] == "" {
logrus.WithFields(map[string]interface{}{

View File

@ -29,7 +29,7 @@ func SendRequest(server string, payload []byte) (string, error) {
return string(body), nil
}
// process event data into proper format acceptable by graphql
// MarshalGQLData processes event data into proper format acceptable by graphql
func MarshalGQLData(gqlData interface{}) (string, error) {
data, err := json.Marshal(gqlData)
if err != nil {

View File

@ -10,7 +10,7 @@ import (
var KubeConfig *string
//getKubeConfig setup the config for access cluster resource
// GetKubeConfig sets up the config for access cluster resource
func GetKubeConfig() (*rest.Config, error) {
// Use in-cluster config if kubeconfig path is not specified
if *KubeConfig == "" {
@ -28,7 +28,7 @@ func GetGenericK8sClient() (*kubernetes.Clientset, error) {
return kubernetes.NewForConfig(config)
}
//This function returns dynamic client and discovery client
// GetDynamicAndDiscoveryClient returns dynamic client and discovery client
func GetDynamicAndDiscoveryClient() (discovery.DiscoveryInterface, dynamic.Interface, error) {
// returns a config object which uses the service account kubernetes gives to pods
config, err := GetKubeConfig()

View File

@ -52,7 +52,7 @@ func GetLogs(podName, namespace, container string) (string, error) {
return str, nil
}
// create pod log for normal pods and chaos-engine pods
//CreatePodLog creates pod log for normal pods and chaos-engine pods
func CreatePodLog(podLog types.PodLogRequest) (types.PodLog, error) {
logDetails := types.PodLog{}
mainLog, err := GetLogs(podLog.PodName, podLog.PodNamespace, "main")
@ -109,7 +109,7 @@ func SendPodLogs(clusterData map[string]string, podLog types.PodLogRequest) {
}
func GenerateLogPayload(cid, accessKey, version string, podLog types.PodLogRequest) ([]byte, error) {
clusterID := `{cluster_id: \"` + cid + `\", version: \"` + version + `\", access_key: \"` + accessKey + `\"}`
clusterID := `{clusterID: \"` + cid + `\", version: \"` + version + `\", accessKey: \"` + accessKey + `\"}`
processed := " Could not get logs "
// get the logs
@ -122,8 +122,8 @@ func GenerateLogPayload(cid, accessKey, version string, podLog types.PodLogReque
}
}
mutation := `{ cluster_id: ` + clusterID + `, request_id:\"` + podLog.RequestID + `\", workflow_run_id: \"` + podLog.WorkflowRunID + `\", pod_name: \"` + podLog.PodName + `\", pod_type: \"` + podLog.PodType + `\", log:\"` + processed[1:len(processed)-1] + `\"}`
var payload = []byte(`{"query":"mutation { podLog(log:` + mutation + ` )}"}`)
mutation := `{ clusterID: ` + clusterID + `, requestID:\"` + podLog.RequestID + `\", workflowRunID: \"` + podLog.WorkflowRunID + `\", podName: \"` + podLog.PodName + `\", podType: \"` + podLog.PodType + `\", log:\"` + processed[1:len(processed)-1] + `\"}`
var payload = []byte(`{"query":"mutation { podLog(request:` + mutation + ` )}"}`)
return payload, nil
}

View File

@ -28,7 +28,7 @@ func GetKubernetesObjects(request types.KubeObjRequest) ([]*types.KubeObject, er
if err != nil {
return nil, err
}
clientset, err := kubernetes.NewForConfig(conf)
clientSet, err := kubernetes.NewForConfig(conf)
if err != nil {
return nil, err
}
@ -55,7 +55,7 @@ func GetKubernetesObjects(request types.KubeObjRequest) ([]*types.KubeObject, er
}
ObjData = append(ObjData, KubeObj)
} else {
namespace, err := clientset.CoreV1().Namespaces().List(context.TODO(), metav1.ListOptions{})
namespace, err := clientSet.CoreV1().Namespaces().List(context.TODO(), metav1.ListOptions{})
if err != nil {
return nil, err
}
@ -73,7 +73,7 @@ func GetKubernetesObjects(request types.KubeObjRequest) ([]*types.KubeObject, er
ObjData = append(ObjData, KubeObj)
}
} else {
return nil, errors.New("No namespace available")
return nil, errors.New("no namespace available")
}
}
@ -108,9 +108,9 @@ func GetObjectDataByNamespace(namespace string, dynamicClient dynamic.Interface,
return kubeObjects, nil
}
func GenerateKubeObject(cid string, accessKey, version string, kubeobjectrequest types.KubeObjRequest) ([]byte, error) {
clusterID := `{cluster_id: \"` + cid + `\", version: \"` + version + `\", access_key: \"` + accessKey + `\"}`
kubeObj, err := GetKubernetesObjects(kubeobjectrequest)
func GenerateKubeObject(cid string, accessKey, version string, kubeObjectRequest types.KubeObjRequest) ([]byte, error) {
clusterID := `{clusterID: \"` + cid + `\", version: \"` + version + `\", accessKey: \"` + accessKey + `\"}`
kubeObj, err := GetKubernetesObjects(kubeObjectRequest)
if err != nil {
return nil, err
}
@ -118,16 +118,16 @@ func GenerateKubeObject(cid string, accessKey, version string, kubeobjectrequest
if err != nil {
return nil, err
}
mutation := `{ cluster_id: ` + clusterID + `, request_id:\"` + kubeobjectrequest.RequestID + `\", kube_obj:\"` + processed[1:len(processed)-1] + `\"}`
mutation := `{ clusterID: ` + clusterID + `, requestID:\"` + kubeObjectRequest.RequestID + `\", kubeObj:\"` + processed[1:len(processed)-1] + `\"}`
var payload = []byte(`{"query":"mutation { kubeObj(kubeData:` + mutation + ` )}"}`)
var payload = []byte(`{"query":"mutation { kubeObj(request:` + mutation + ` )}"}`)
return payload, nil
}
//SendKubeObjects generates graphql mutation to send kubernetes objects data to graphql server
func SendKubeObjects(clusterData map[string]string, kubeobjectrequest types.KubeObjRequest) error {
func SendKubeObjects(clusterData map[string]string, kubeObjectRequest types.KubeObjRequest) error {
// generate graphql payload
payload, err := GenerateKubeObject(clusterData["CLUSTER_ID"], clusterData["ACCESS_KEY"], clusterData["VERSION"], kubeobjectrequest)
payload, err := GenerateKubeObject(clusterData["CLUSTER_ID"], clusterData["ACCESS_KEY"], clusterData["VERSION"], kubeObjectRequest)
if err != nil {
logrus.WithError(err).Print("Error while getting KubeObject Data")
return err

View File

@ -208,7 +208,7 @@ func applyRequest(requestType string, obj *unstructured.Unstructured) (*unstruct
if requestType == "create" {
response, err := dr.Create(ctx, obj, metav1.CreateOptions{})
if k8s_errors.IsAlreadyExists(err) {
// This doesnt ever happen even if it does already exist
// This doesn't ever happen even if it does already exist
logrus.Info("Already exists")
return nil, nil
}
@ -222,7 +222,7 @@ func applyRequest(requestType string, obj *unstructured.Unstructured) (*unstruct
} else if requestType == "update" {
getObj, err := dr.Get(ctx, obj.GetName(), metav1.GetOptions{})
if k8s_errors.IsNotFound(err) {
// This doesnt ever happen even if it is already deleted or not found
// This doesn't ever happen even if it is already deleted or not found
logrus.Info("%v not found", obj.GetName())
return nil, nil
}
@ -243,7 +243,7 @@ func applyRequest(requestType string, obj *unstructured.Unstructured) (*unstruct
} else if requestType == "delete" {
err := dr.Delete(ctx, obj.GetName(), metav1.DeleteOptions{})
if k8s_errors.IsNotFound(err) {
// This doesnt ever happen even if it is already deleted or not found
// This doesn't ever happen even if it is already deleted or not found
logrus.Info("%v not found", obj.GetName())
return nil, nil
}
@ -257,7 +257,7 @@ func applyRequest(requestType string, obj *unstructured.Unstructured) (*unstruct
} else if requestType == "get" {
response, err := dr.Get(ctx, obj.GetName(), metav1.GetOptions{})
if k8s_errors.IsNotFound(err) {
// This doesnt ever happen even if it is already deleted or not found
// This doesn't ever happen even if it is already deleted or not found
logrus.Info("%v not found", obj.GetName())
return nil, nil
}
@ -287,7 +287,7 @@ func addCustomLabels(obj *unstructured.Unstructured, customLabels map[string]str
obj.SetLabels(newLabels)
}
// This function handles cluster operations
// ClusterOperations handles cluster operations
func ClusterOperations(clusterAction types.Action) (*unstructured.Unstructured, error) {
// Converting JSON to YAML and store it in yamlStr variable
@ -333,7 +333,7 @@ func ClusterOperations(clusterAction types.Action) (*unstructured.Unstructured,
}
func ClusterConfirm(clusterData map[string]string) ([]byte, error) {
payload := `{"query":"mutation{ clusterConfirm(identity: {cluster_id: \"` + clusterData["CLUSTER_ID"] + `\", version: \"` + clusterData["VERSION"] + `\", access_key: \"` + clusterData["ACCESS_KEY"] + `\"}){isClusterConfirmed newAccessKey cluster_id}}"}`
payload := `{"query":"mutation{ confirmClusterRegistration(request: {clusterID: \"` + clusterData["CLUSTER_ID"] + `\", version: \"` + clusterData["VERSION"] + `\", accessKey: \"` + clusterData["ACCESS_KEY"] + `\"}){isClusterConfirmed newAccessKey clusterID}}"}`
resp, err := graphql.SendRequest(clusterData["SERVER_ADDR"], []byte(payload))
if err != nil {
return nil, err

View File

@ -15,7 +15,7 @@ import (
)
func ClusterConnect(clusterData map[string]string) {
query := `{"query":"subscription {\n clusterConnect(clusterInfo: {cluster_id: \"` + clusterData["CLUSTER_ID"] + `\", version: \"` + clusterData["VERSION"] + `\", access_key: \"` + clusterData["ACCESS_KEY"] + `\"}) {\n \t project_id,\n action{\n k8s_manifest,\n external_data,\n request_type\n username\n namespace\n }\n }\n}\n"}`
query := `{"query":"subscription {\n clusterConnect(clusterInfo: {clusterID: \"` + clusterData["CLUSTER_ID"] + `\", version: \"` + clusterData["VERSION"] + `\", accessKey: \"` + clusterData["ACCESS_KEY"] + `\"}) {\n \t projectID,\n action{\n k8sManifest,\n externalData,\n requestType\n username\n namespace\n }\n }\n}\n"}`
serverURL, err := url.Parse(clusterData["SERVER_ADDR"])
if err != nil {
logrus.WithError(err).Fatal("Failed to parse URL")
@ -72,7 +72,6 @@ func ClusterConnect(clusterData map[string]string) {
if err != nil {
logrus.WithError(err).Fatal("Failed to read message")
}
var r types.RawData
err = json.Unmarshal(message, &r)
if err != nil {
@ -90,7 +89,6 @@ func ClusterConnect(clusterData map[string]string) {
logrus.Error("Error response from the server : ", string(message))
continue
}
err = RequestProcessor(clusterData, r)
if err != nil {
logrus.WithError(err).Error("Error on processing request")
@ -103,7 +101,6 @@ func RequestProcessor(clusterData map[string]string, r types.RawData) error {
KubeObjRequest := types.KubeObjRequest{
RequestID: r.Payload.Data.ClusterConnect.ProjectID,
}
err := json.Unmarshal([]byte(r.Payload.Data.ClusterConnect.Action.ExternalData), &KubeObjRequest)
if err != nil {
return errors.New("failed to json unmarshal: " + err.Error())

View File

@ -19,43 +19,30 @@ type Payload struct {
}
type Data struct {
ClusterConfirm ClusterConfirm `json:"clusterConfirm"`
ClusterConfirm ClusterConfirm `json:"confirmClusterRegistration"`
ClusterConnect ClusterConnect `json:"clusterConnect"`
}
type ClusterConfirm struct {
IsClusterConfirmed bool `json:isClusterConfirmed`
NewAccessKey string `json:newAccessKey`
ClusterID string `json:cluster_id`
IsClusterConfirmed bool `json:"isClusterConfirmed"`
NewAccessKey string `json:"newAccessKey"`
ClusterID string `json:"clusterID"`
}
type ClusterConnect struct {
ProjectID string `json:"project_id"`
ProjectID string `json:"projectID"`
Action Action `json:"action"`
}
type KubeObjRequest struct {
RequestID string
ClusterID string `json:"cluster_id"`
ObjectType string `json:"object_type"`
KubeGVRRequest KubeGVRRequest `json:"kube_obj_request"`
}
type KubeGVRRequest struct {
Group string `json:"group"`
Version string `json:"version"`
Resource string `json:"resource"`
}
type Action struct {
K8SManifest string `json:"k8s_manifest"`
ExternalData string `json:"external_data"`
RequestType string `json:"request_type"`
K8SManifest string `json:"k8sManifest"`
ExternalData string `json:"externalData"`
RequestType string `json:"requestType"`
Username string `json:"username"`
Namespace string `json:"namespace"`
}
type WorkflowSyncExternalData struct {
WorkflowID string `json:"workflow_id"`
WorkflowRunID string `json:"workflow_run_id"`
WorkflowID string `json:"workflowID"`
WorkflowRunID string `json:"workflowRunID"`
}

View File

@ -2,11 +2,11 @@ package types
import "github.com/litmuschaos/chaos-operator/pkg/apis/litmuschaos/v1alpha1"
// events data
// WorkflowEvent consists of workflow related data
type WorkflowEvent struct {
WorkflowType string `json:"workflow_type"`
WorkflowType string `json:"workflowType"`
WorkflowID string `json:"-"`
EventType string `json:"event_type"`
EventType string `json:"eventType"`
UID string `json:"-"`
Namespace string `json:"namespace"`
Name string `json:"name"`
@ -16,10 +16,10 @@ type WorkflowEvent struct {
StartedAt string `json:"startedAt"`
FinishedAt string `json:"finishedAt"`
Nodes map[string]Node `json:"nodes"`
ExecutedBy string `json:"executed_by"`
ExecutedBy string `json:"executedBy"`
}
// each node/step data
// Node consist of node/step data
type Node struct {
Name string `json:"name"`
Phase string `json:"phase"`
@ -31,7 +31,7 @@ type Node struct {
ChaosExp *ChaosData `json:"chaosData,omitempty"`
}
// chaos data
// ChaosData consists of ChaosEngine related data
type ChaosData struct {
EngineUID string `json:"engineUID"`
EngineContext string `json:"engineContext"`

View File

@ -6,19 +6,34 @@ import (
"k8s.io/apimachinery/pkg/types"
)
type KubeObjRequest struct {
RequestID string
ClusterID string `json:"clusterID"`
ObjectType string `json:"objectType"`
KubeGVRRequest KubeGVRRequest `json:"kubeObjRequest"`
}
type KubeGVRRequest struct {
Group string `json:"group"`
Version string `json:"version"`
Resource string `json:"resource"`
}
//KubeObject consists of all the namespaces and its related K8S object details
type KubeObject struct {
Namespace string `json:"namespace"`
Data []ObjectData `json:"data"`
}
//ObjectData consists of Kubernetes Objects related details
type ObjectData struct {
Name string `json:"name"`
UID types.UID `json:"uid"`
Namespace string `json:"namespace"`
APIVersion string `json:"api_version"`
CreationTimestamp metav1.Time `json:"creation_timestamp"`
APIVersion string `json:"apiVersion"`
CreationTimestamp metav1.Time `json:"creationTimestamp"`
Containers []v1.Container `json:"containers"`
TerminationGracePeriods *int64 `json:"termination_grace_periods"`
TerminationGracePeriods *int64 `json:"terminationGracePeriods"`
Volumes []v1.Volume `json:"volumes"`
Labels map[string]string `json:"labels"`
}

View File

@ -2,17 +2,18 @@ package types
type PodLogRequest struct {
RequestID string
ClusterID string `json:"cluster_id"`
WorkflowRunID string `json:"workflow_run_id"`
PodName string `json:"pod_name"`
PodNamespace string `json:"pod_namespace"`
PodType string `json:"pod_type"`
ExpPod *string `json:"exp_pod"`
RunnerPod *string `json:"runner_pod"`
ChaosNamespace *string `json:"chaos_namespace"`
ClusterID string `json:"clusterID"`
WorkflowRunID string `json:"workflowRunID"`
PodName string `json:"podName"`
PodNamespace string `json:"podNamespace"`
PodType string `json:"podType"`
ExpPod *string `json:"expPod"`
RunnerPod *string `json:"runnerPod"`
ChaosNamespace *string `json:"chaosNamespace"`
}
// PodLog consists logs from Chaos related pods and experiment pods
type PodLog struct {
MainPod string `json:"main_logs"`
ChaosPod map[string]string `json:"chaos_logs",omitempty`
MainPod string `json:"mainLogs"`
ChaosPod map[string]string `json:"chaosLogs",omitempty`
}

File diff suppressed because it is too large

View File

@ -73,7 +73,7 @@ const LocalQuickActionCard: React.FC<LocalQuickActionCardProps> = ({
// TODO: settings only accessible by Owner
(homePage || returningHome || community || observability) &&
getProjectRole() === Role.owner
getProjectRole() === Role.OWNER
? {
src: './icons/teamMember.svg',
alt: 'team',

View File

@ -161,7 +161,7 @@ const SideBar: React.FC = () => {
</CustomisedListItem>
)}
{role === UserRole.admin && projectRole === 'Owner' && (
{role === UserRole.ADMIN && projectRole === 'Owner' && (
<CustomisedListItem
key="usage-statistics"
handleClick={() => {

View File

@ -27,12 +27,12 @@ const UsefulLinks: React.FC<UsefulLinkProps> = ({
</Typography>
</div>
{maintainers?.map((m: Maintainer) => (
<div className={classes.maintainerField} key={m.Name}>
<div className={classes.maintainerField} key={m.name}>
<Typography className={classes.maintainerlinks}>
{m.Name}
{m.name}
</Typography>
<Typography className={classes.maintainerlinks}>
{m.Email}
{m.email}
</Typography>
</div>
))}
@ -50,15 +50,15 @@ const UsefulLinks: React.FC<UsefulLinkProps> = ({
</div>
{data?.map(
(d: Link) =>
d.Url && (
<div key={d.Name}>
d.url && (
<div key={d.name}>
<a
target="_blank"
rel="noopener noreferrer"
href={d.Url}
href={d.url}
className={classes.createLinkText}
>
<Typography className={classes.linkType}>{d.Name}</Typography>
<Typography className={classes.linkType}>{d.name}</Typography>
</a>
</div>
)

View File

@ -206,7 +206,7 @@ const Routes: React.FC = () => {
<Route exact path="/create-workflow" component={CreateWorkflow} />
<Route
exact
path="/workflows/:workflowRunId"
path="/workflows/:workflowRunID"
component={WorkflowDetails}
/>
<Route
@ -221,7 +221,7 @@ const Routes: React.FC = () => {
/>
<Route
exact
path="/analytics/workflowStatistics/:workflowId"
path="/analytics/workflowStatistics/:workflowID"
component={WorkflowInfoStats}
/>
<Route exact path="/community" component={Community} />
@ -244,7 +244,7 @@ const Routes: React.FC = () => {
}}
/>
)}
{role === UserRole.admin ? (
{role === UserRole.ADMIN ? (
<Route path="/usage-statistics" component={UsageStatistics} />
) : (
<Redirect

View File

@ -1,320 +0,0 @@
import { gql } from '@apollo/client';
export const CREATE_WORKFLOW = gql`
mutation createChaosWorkFlow($ChaosWorkFlowInput: ChaosWorkFlowInput!) {
createChaosWorkFlow(input: $ChaosWorkFlowInput) {
workflow_id
cronSyntax
workflow_name
workflow_description
isCustomWorkflow
}
}
`;
export const ADD_WORKFLOW_TEMPLATE = gql`
mutation addWorkflowTemplate($data: TemplateInput!) {
createManifestTemplate(templateInput: $data) {
template_name
template_id
}
}
`;
export const DELETE_WORKFLOW_TEMPLATE = gql`
mutation deleteManifestTemplate($projectID: String!, $data: String!) {
deleteManifestTemplate(projectID: $projectID, template_id: $data)
}
`;
export const UPDATE_USER_STATE = gql`
mutation updateUserState($uid: String!, $isDeactivate: Boolean!) {
updateUserState(uid: $uid, isDeactivate: $isDeactivate)
}
`;
export const UPDATE_SCHEDULE = gql`
mutation updateChaos($ChaosWorkFlowInput: ChaosWorkFlowInput!) {
updateChaosWorkflow(input: $ChaosWorkFlowInput) {
workflow_id
workflow_name
workflow_description
isCustomWorkflow
cronSyntax
}
}
`;
export const UPDATE_DETAILS = gql`
mutation updateUser($user: UpdateUserInput!) {
updateUser(user: $user)
}
`;
export const USER_CLUSTER_REG = gql`
mutation userCluster($ClusterInput: ClusterInput!) {
userClusterReg(clusterInput: $ClusterInput) {
token
cluster_id
cluster_name
}
}
`;
export const ADD_MY_HUB = gql`
mutation addMyHub($MyHubDetails: CreateMyHub!, $projectID: String!) {
addMyHub(myhubInput: $MyHubDetails, projectID: $projectID) {
HubName
RepoURL
RepoBranch
}
}
`;
export const SAVE_MY_HUB = gql`
mutation saveMyHub($MyHubDetails: CreateMyHub!, $projectID: String!) {
saveMyHub(myhubInput: $MyHubDetails, projectID: $projectID) {
HubName
RepoURL
RepoBranch
}
}
`;
export const UPDATE_MY_HUB = gql`
mutation updateMyHub($MyHubDetails: UpdateMyHub!, $projectID: String!) {
updateMyHub(myhubInput: $MyHubDetails, projectID: $projectID) {
HubName
RepoURL
RepoBranch
}
}
`;
export const SYNC_REPO = gql`
mutation syncHub($id: ID!, $projectID: String!) {
syncHub(id: $id, projectID: $projectID) {
id
RepoURL
RepoBranch
IsAvailable
TotalExp
HubName
}
}
`;
export const DELETE_HUB = gql`
mutation deleteMyHub($hub_id: String!, $projectID: String!) {
deleteMyHub(hub_id: $hub_id, projectID: $projectID)
}
`;
export const GENERATE_SSH = gql`
mutation generateSSHKey {
generaterSSHKey {
privateKey
publicKey
}
}
`;
export const DELETE_CLUSTERS = gql`
mutation deleteClusters($projectID: String!, $cluster_ids: [String]!) {
deleteClusters(projectID: $projectID, cluster_ids: $cluster_ids)
}
`;
export const ENABLE_GITOPS = gql`
mutation enableGitOps($gitConfig: GitConfig!) {
enableGitOps(config: $gitConfig)
}
`;
export const UPDATE_GITOPS = gql`
mutation updateGitOps($gitConfig: GitConfig!) {
updateGitOps(config: $gitConfig)
}
`;
export const DISABLE_GITOPS = gql`
mutation disableGitOPs($data: String!) {
disableGitOps(project_id: $data)
}
`;
export const RERUN_CHAOS_WORKFLOW = gql`
mutation rerunChaosWorkflow($projectID: String!, $data: String!) {
reRunChaosWorkFlow(projectID: $projectID, workflowID: $data)
}
`;
export const LEAVE_PROJECT = gql`
mutation LeaveProject($data: MemberInput!) {
leaveProject(member: $data)
}
`;
export const CREATE_DATASOURCE = gql`
mutation createDataSource($DSInput: DSInput) {
createDataSource(datasource: $DSInput) {
ds_id
ds_name
ds_type
ds_url
access_type
auth_type
basic_auth_username
basic_auth_password
scrape_interval
query_timeout
http_method
project_id
health_status
}
}
`;
export const UPDATE_DATASOURCE = gql`
mutation updateDataSource($DSInput: DSInput!) {
updateDataSource(datasource: $DSInput) {
ds_id
ds_name
ds_type
ds_url
access_type
auth_type
basic_auth_username
basic_auth_password
scrape_interval
query_timeout
http_method
project_id
}
}
`;
export const DELETE_DATASOURCE = gql`
mutation deleteDataSource(
$projectID: String!
$deleteDSInput: deleteDSInput!
) {
deleteDataSource(projectID: $projectID, input: $deleteDSInput)
}
`;
export const CREATE_DASHBOARD = gql`
mutation createDashBoard($createDBInput: createDBInput) {
createDashBoard(dashboard: $createDBInput) {
db_id
}
}
`;
export const UPDATE_DASHBOARD = gql`
mutation updateDashboard(
$projectID: String!
$updateDBInput: updateDBInput!
$chaosQueryUpdate: Boolean!
) {
updateDashboard(
projectID: $projectID
dashboard: $updateDBInput
chaosQueryUpdate: $chaosQueryUpdate
)
}
`;
export const DELETE_DASHBOARD = gql`
mutation deleteDashboard($projectID: String!, $dbID: String) {
deleteDashboard(projectID: $projectID, db_id: $dbID)
}
`;
export const UPDATE_PANEL = gql`
mutation updatePanel($panelInput: [panel]) {
updatePanel(panelInput: $panelInput)
}
`;
export const ADD_IMAGE_REGISTRY = gql`
mutation createImageRegistry(
$projectID: String!
$imageRegistryInfo: imageRegistryInput!
) {
createImageRegistry(
project_id: $projectID
imageRegistryInfo: $imageRegistryInfo
) {
image_registry_info {
image_repo_name
image_registry_name
image_registry_type
is_default
}
}
}
`;
export const UPDATE_IMAGE_REGISTRY = gql`
mutation updateImageRegistry(
$imageRegistryID: String!
$projectID: String!
$imageRegistryInfo: imageRegistryInput!
) {
updateImageRegistry(
image_registry_id: $imageRegistryID
project_id: $projectID
imageRegistryInfo: $imageRegistryInfo
) {
image_registry_info {
image_repo_name
image_registry_name
image_registry_type
is_default
}
}
}
`;
export const SYNC_WORKFLOW = gql`
mutation syncWorkflow(
$projectID: String!
$workflowID: String!
$workflow_run_id: String!
) {
syncWorkflow(
projectID: $projectID
workflowID: $workflowID
workflow_run_id: $workflow_run_id
)
}
`;
export const DELETE_WORKFLOW = gql`
mutation deleteWorkflow(
$projectID: String!
$workflowID: String
$workflow_run_id: String
) {
deleteChaosWorkflow(
projectID: $projectID
workflowID: $workflowID
workflow_run_id: $workflow_run_id
)
}
`;
export const TERMINATE_WORKFLOW = gql`
mutation terminateWorkflow(
$projectID: String!
$workflowID: String
$workflow_run_id: String
) {
terminateChaosWorkflow(
projectID: $projectID
workflowID: $workflowID
workflow_run_id: $workflow_run_id
)
}
`;

View File

@ -0,0 +1,83 @@
import { gql } from '@apollo/client';
export const CREATE_DATASOURCE = gql`
mutation createDataSource($DSInput: DSInput) {
createDataSource(datasource: $DSInput) {
dsID
dsName
dsType
dsURL
accessType
authType
basicAuthUsername
basicAuthPassword
scrapeInterval
queryTimeout
httpMethod
projectID
healthStatus
}
}
`;
export const UPDATE_DATASOURCE = gql`
mutation updateDataSource($DSInput: DSInput!) {
updateDataSource(datasource: $DSInput) {
dsID
dsName
dsType
dsURL
accessType
authType
basicAuthUsername
basicAuthPassword
scrapeInterval
queryTimeout
httpMethod
projectID
}
}
`;
export const DELETE_DATASOURCE = gql`
mutation deleteDataSource(
$projectID: String!
$deleteDSInput: deleteDSInput!
) {
deleteDataSource(projectID: $projectID, input: $deleteDSInput)
}
`;
export const CREATE_DASHBOARD = gql`
mutation createDashBoard($dashboard: CreateDBInput!) {
createDashBoard(dashboard: $dashboard) {
dbID
}
}
`;
export const UPDATE_DASHBOARD = gql`
mutation updateDashboard(
$projectID: String!
$dashboard: UpdateDBInput!
$chaosQueryUpdate: Boolean!
) {
updateDashboard(
projectID: $projectID
dashboard: $dashboard
chaosQueryUpdate: $chaosQueryUpdate
)
}
`;
export const DELETE_DASHBOARD = gql`
mutation deleteDashboard($projectID: String!, $dbID: String) {
deleteDashboard(projectID: $projectID, dbID: $dbID)
}
`;
export const UPDATE_PANEL = gql`
mutation updatePanel($panelInput: [panel]) {
updatePanel(panelInput: $panelInput)
}
`;

View File

@ -0,0 +1,50 @@
import { gql } from '@apollo/client';
// chaosHub (Change mutation name to add_chaos_hub)
export const ADD_MY_HUB = gql`
mutation addChaosHub($request: CreateChaosHubRequest!) {
addChaosHub(request: $request) {
hubName
repoURL
repoBranch
}
}
`;
export const UPDATE_MY_HUB = gql`
mutation updateChaosHub($request: UpdateChaosHubRequest!) {
updateChaosHub(request: $request) {
hubName
repoURL
repoBranch
}
}
`;
export const SYNC_REPO = gql`
mutation syncChaosHub($id: ID!, $projectID: String!) {
syncChaosHub(id: $id, projectID: $projectID) {
id
repoURL
repoBranch
isAvailable
totalExp
hubName
}
}
`;
export const DELETE_HUB = gql`
mutation deleteChaosHub($hubID: String!, $projectID: String!) {
deleteChaosHub(hubID: $hubID, projectID: $projectID)
}
`;
export const GENERATE_SSH = gql`
mutation generateSSHKey {
generaterSSHKey {
privateKey
publicKey
}
}
`;

View File

@ -0,0 +1,20 @@
import { gql } from '@apollo/client';
// gitOps
export const ENABLE_GITOPS = gql`
mutation enableGitOps($config: GitConfigResponse!) {
enableGitOps(config: $config)
}
`;
export const UPDATE_GITOPS = gql`
mutation updateGitOps($config: GitConfigResponse!) {
updateGitOps(config: $config)
}
`;
export const DISABLE_GITOPS = gql`
mutation disableGitOps($projectID: String!) {
disableGitOps(projectID: $data)
}
`;

View File

@ -0,0 +1,42 @@
import { gql } from '@apollo/client';
// ImageResgistry
export const ADD_IMAGE_REGISTRY = gql`
mutation createImageRegistry(
$projectID: String!
$imageRegistryInfo: imageRegistryInput!
) {
createImageRegistry(
projectID: $projectID
imageRegistryInfo: $imageRegistryInfo
) {
imageRegistryInfo {
imageRepoName
imageRegistryName
imageRegistryType
isDefault
}
}
}
`;
export const UPDATE_IMAGE_REGISTRY = gql`
mutation updateImageRegistry(
$imageRegistryID: String!
$projectID: String!
$imageRegistryInfo: ImageRegistryInput!
) {
updateImageRegistry(
imageRegistryID: $imageRegistryID
projectID: $projectID
imageRegistryInfo: $imageRegistryInfo
) {
imageRegistryInfo {
imageRepoName
imageRegistryName
imageRegistryType
isDefault
}
}
}
`;

View File

@ -0,0 +1,6 @@
export * from './analytics';
export * from './chaosHub';
export * from './gitOps';
export * from './imageRegistry';
export * from './template';
export * from './workflows';

View File

@ -0,0 +1,22 @@
import { gql } from '@apollo/client';
export const ADD_WORKFLOW_TEMPLATE = gql`
mutation createWorkflowTemplate($request: TemplateInput!) {
createWorkflowTemplate(request: $request) {
templateName
templateID
}
}
`;
export const DELETE_WORKFLOW_TEMPLATE = gql`
mutation deleteWorkflowTemplate($projectID: String!, $templateID: String!) {
deleteWorkflowTemplate(projectID: $projectID, templateID: $templateID)
}
`;
export const DELETE_CLUSTERS = gql`
mutation deleteClusters($projectID: String!, $clusterIDs: [String]!) {
deleteClusters(projectID: $projectID, clusterIDs: $clusterIDs)
}
`;

View File

@ -0,0 +1,74 @@
import { gql } from '@apollo/client';
// Workflow
export const CREATE_WORKFLOW = gql`
mutation createChaosWorkFlow($request: ChaosWorkFlowRequest!) {
createChaosWorkFlow(request: $request) {
workflowID
cronSyntax
workflowName
workflowDescription
isCustomWorkflow
}
}
`;
export const UPDATE_SCHEDULE = gql`
mutation updateChaosWorkflow($ChaosWorkFlowInput: ChaosWorkFlowInput!) {
updateChaosWorkflow(input: $ChaosWorkFlowInput) {
workflowID
cronSyntax
workflowName
workflowDescription
isCustomWorkflow
}
}
`;
export const RERUN_CHAOS_WORKFLOW = gql`
mutation reRunChaosWorkflow($projectID: String!, $workflowID: String!) {
reRunChaosWorkFlow(projectID: $projectID, workflowID: $workflowID)
}
`;
export const SYNC_WORKFLOW = gql`
mutation syncWorkflow(
$projectID: String!
$workflowID: String!
$workflowRunID: String!
) {
syncWorkflow(
projectID: $projectID
workflowID: $workflowID
workflowRunID: $workflowRunID
)
}
`;
export const DELETE_WORKFLOW = gql`
mutation deleteChaosWorkflow(
$projectID: String!
$workflowID: String
$workflowRunID: String
) {
deleteChaosWorkflow(
projectID: $projectID
workflowID: $workflowID
workflowRunID: $workflowRunID
)
}
`;
export const TERMINATE_WORKFLOW = gql`
mutation terminateWorkflow(
$projectID: String!
$workflowID: String
$workflowRunID: String
) {
terminateChaosWorkflow(
projectID: $projectID
workflowID: $workflowID
workflowRunID: $workflowRunID
)
}
`;

View File

@ -1,648 +0,0 @@
import { gql } from '@apollo/client';
export const WORKFLOW_DETAILS_WITH_EXEC_DATA = gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_id
workflow_name
workflow_run_id
cluster_name
last_updated
cluster_id
phase
execution_data
resiliency_score
isRemoved
executed_by
}
}
}
`;
export const WORKFLOW_DETAILS = gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_run_id
workflow_id
cluster_name
last_updated
project_id
cluster_id
workflow_name
cluster_type
phase
resiliency_score
experiments_passed
experiments_failed
experiments_awaited
experiments_stopped
experiments_na
total_experiments
isRemoved
executed_by
}
}
}
`;
export const WORKFLOW_RUN_DETAILS = gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
weightages {
experiment_name
weightage
}
workflow_id
workflow_name
workflow_run_id
cluster_name
execution_data
last_updated
phase
resiliency_score
experiments_passed
total_experiments
isRemoved
executed_by
}
}
}
`;
export const WORKFLOW_STATS = gql`
query getWorkflowStats(
$filter: TimeFrequency!
$project_id: ID!
$show_workflow_runs: Boolean!
) {
getWorkflowStats(
filter: $filter
project_id: $project_id
show_workflow_runs: $show_workflow_runs
) {
date
value
}
}
`;
export const STACKED_BAR_GRAPH = gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_run_id
workflow_name
last_updated
total_experiments
experiments_passed
resiliency_score
}
}
}
`;
export const WORKFLOW_LIST_DETAILS = gql`
query workflowListDetails($workflowInput: ListWorkflowsInput!) {
ListWorkflow(workflowInput: $workflowInput) {
total_no_of_workflows
workflows {
workflow_id
workflow_manifest
cronSyntax
cluster_name
workflow_name
workflow_description
weightages {
experiment_name
weightage
}
isCustomWorkflow
updated_at
created_at
project_id
cluster_id
cluster_type
isRemoved
last_updated_by
}
}
}
`;
export const WORKFLOW_LIST_DETAILS_FOR_MANIFEST = gql`
query workflowListDetails($projectID: String!, $workflowIDs: [ID]) {
ListWorkflow(project_id: $projectID, workflow_ids: $workflowIDs) {
workflow_id
workflow_manifest
workflow_name
}
}
`;
export const GET_WORKFLOW_RUNS_STATS = gql`
query getWorkflowRunStats(
$workflowRunStatsRequest: WorkflowRunStatsRequest!
) {
getWorkflowRunStats(workflowRunStatsRequest: $workflowRunStatsRequest) {
total_workflow_runs
succeeded_workflow_runs
failed_workflow_runs
running_workflow_runs
workflow_run_succeeded_percentage
workflow_run_failed_percentage
average_resiliency_score
passed_percentage
failed_percentage
total_experiments
experiments_passed
experiments_failed
experiments_awaited
experiments_stopped
experiments_na
}
}
`;
export const GET_CLUSTER = gql`
query getClusters($project_id: String!, $cluster_type: String) {
getCluster(project_id: $project_id, cluster_type: $cluster_type) {
cluster_id
cluster_name
description
is_active
is_registered
is_cluster_confirmed
updated_at
created_at
cluster_type
no_of_schedules
no_of_workflows
token
last_workflow_timestamp
agent_namespace
agent_scope
version
}
}
`;
export const GET_CLUSTER_LENGTH = gql`
query getClusters($project_id: String!) {
getCluster(project_id: $project_id) {
cluster_id
}
}
`;
export const GET_CLUSTER_NAMES = gql`
query getClusters($project_id: String!) {
getCluster(project_id: $project_id) {
cluster_name
}
}
`;
export const ALL_USERS = gql`
query allUsers {
users {
id
name
username
email
created_at
deactivated_at
}
}
`;
export const CORE_CHART_FIELDS = gql`
fragment CoreChartFields on Chart {
ApiVersion
Kind
Metadata {
Name
Version
Annotations {
Categories
Vendor
CreatedAt
Repository
Support
ChartDescription
}
}
Spec {
DisplayName
CategoryDescription
Keywords
Maturity
Experiments
Maintainers {
Name
Email
}
MinKubeVersion
Provider
Links {
Name
Url
}
ChaosExpCRDLink
Platforms
ChaosType
}
PackageInfo {
PackageName
Experiments {
Name
CSV
Desc
}
}
}
`;
export const GET_CHARTS_DATA = gql`
${CORE_CHART_FIELDS}
query getCharts($HubName: String!, $projectID: String!) {
getCharts(HubName: $HubName, projectID: $projectID) {
...CoreChartFields
}
}
`;
export const GET_EXPERIMENT_DATA = gql`
${CORE_CHART_FIELDS}
query getExperiment($data: ExperimentInput!) {
getHubExperiment(experimentInput: $data) {
...CoreChartFields
}
}
`;
export const GET_HUB_STATUS = gql`
query getHubStatus($data: String!) {
getHubStatus(projectID: $data) {
id
HubName
RepoBranch
RepoURL
TotalExp
IsAvailable
AuthType
IsPrivate
Token
UserName
Password
SSHPrivateKey
SSHPublicKey
LastSyncedAt
}
}
`;
export const GET_ENGINE_YAML = gql`
query getEngineData($experimentInput: ExperimentInput!) {
getYAMLData(experimentInput: $experimentInput)
}
`;
export const GET_EXPERIMENT_YAML = gql`
query getExperimentData($experimentInput: ExperimentInput!) {
getYAMLData(experimentInput: $experimentInput)
}
`;
export const GET_GITOPS_DATA = gql`
query gitOPsData($data: String!) {
getGitOpsDetails(project_id: $data) {
Enabled
ProjectID
Branch
RepoURL
AuthType
Token
UserName
Password
SSHPrivateKey
}
}
`;
export const LIST_MANIFEST_TEMPLATE = gql`
query ListManifestTemplate($data: String!) {
ListManifestTemplate(project_id: $data) {
template_id
manifest
project_name
template_description
template_name
isCustomWorkflow
}
}
`;
export const LIST_DATASOURCE = gql`
query listDataSource($projectID: String!) {
ListDataSource(project_id: $projectID) {
ds_id
ds_name
ds_type
ds_url
access_type
auth_type
basic_auth_username
basic_auth_password
scrape_interval
query_timeout
http_method
project_id
created_at
updated_at
health_status
}
}
`;
export const LIST_DATASOURCE_OVERVIEW = gql`
query listDataSource($projectID: String!) {
ListDataSource(project_id: $projectID) {
ds_id
}
}
`;
export const GET_PORTAL_DASHBOARDS = gql`
query getPortalDashboards($projectID: String!, $hubName: String!) {
PortalDashboardData(project_id: $projectID, hub_name: $hubName) {
name
dashboard_data
}
}
`;
export const LIST_DASHBOARD = gql`
query listDashboard($projectID: String!, $clusterID: String, $dbID: String) {
ListDashboard(
project_id: $projectID
cluster_id: $clusterID
db_id: $dbID
) {
db_id
ds_id
db_name
cluster_name
ds_name
ds_type
ds_url
ds_health_status
db_type_id
db_type_name
db_information
chaos_event_query_template
chaos_verdict_query_template
application_metadata_map {
namespace
applications {
kind
names
}
}
panel_groups {
panels {
panel_id
created_at
prom_queries {
queryid
prom_query_name
legend
resolution
minstep
line
close_area
}
panel_options {
points
grids
left_axis
}
panel_name
y_axis_left
y_axis_right
x_axis_down
unit
}
panel_group_name
panel_group_id
}
end_time
start_time
refresh_rate
project_id
cluster_id
viewed_at
}
}
`;
export const LIST_DASHBOARD_OVERVIEW = gql`
query listDashboard($projectID: String!, $clusterID: String, $dbID: String) {
ListDashboard(
project_id: $projectID
cluster_id: $clusterID
db_id: $dbID
) {
db_id
db_name
db_type_id
db_type_name
cluster_name
cluster_id
viewed_at
db_information
chaos_event_query_template
chaos_verdict_query_template
application_metadata_map {
namespace
applications {
kind
names
}
}
panel_groups {
panels {
panel_id
created_at
prom_queries {
queryid
prom_query_name
legend
resolution
minstep
line
close_area
}
panel_options {
points
grids
left_axis
}
panel_name
y_axis_left
y_axis_right
x_axis_down
unit
}
panel_group_name
panel_group_id
}
}
}
`;
export const PROM_QUERY = gql`
query PrometheusQuery($prometheusInput: promInput) {
GetPromQuery(query: $prometheusInput) {
metricsResponse {
queryid
legends
tsvs {
date
value
}
}
annotationsResponse {
queryid
legends
tsvs {
date
value
}
}
}
}
`;
export const PROM_LABEL_VALUES = gql`
query PrometheusLabelValues($prometheusInput: promSeriesInput) {
GetPromLabelNamesAndValues(series: $prometheusInput) {
series
labelValues {
label
values {
name
}
}
}
}
`;
export const PROM_SERIES_LIST = gql`
query PrometheusSeriesList($prometheusDSInput: dsDetails) {
GetPromSeriesList(ds_details: $prometheusDSInput) {
seriesList
}
}
`;
export const GET_TEMPLATE_BY_ID = gql`
query GetManifestTemplate($projectID: String!, $data: String!) {
GetTemplateManifestByID(projectID: $projectID, template_id: $data) {
template_id
template_name
template_description
manifest
}
}
`;
export const GET_PREDEFINED_WORKFLOW_LIST = gql`
query GetPredefinedWorkflowList($hubname: String!, $projectid: String!) {
GetPredefinedWorkflowList(HubName: $hubname, projectID: $projectid)
}
`;
export const GET_PREDEFINED_EXPERIMENT_YAML = gql`
query GetPredefinedExperimentYAML($experimentInput: ExperimentInput!) {
GetPredefinedExperimentYAML(experimentInput: $experimentInput)
}
`;
export const LIST_IMAGE_REGISTRY = gql`
query ListImageRegistry($data: String!) {
ListImageRegistry(project_id: $data) {
image_registry_info {
enable_registry
is_default
}
image_registry_id
}
}
`;
export const GET_IMAGE_REGISTRY = gql`
query GetImageRegistry($registryid: String!, $projectid: String!) {
GetImageRegistry(image_registry_id: $registryid, project_id: $projectid) {
image_registry_info {
is_default
enable_registry
secret_name
secret_namespace
image_registry_name
image_repo_name
image_registry_type
}
image_registry_id
}
}
`;
export const GET_GLOBAL_STATS = gql`
query getGlobalStats($query: UsageQuery!) {
UsageQuery(query: $query) {
TotalCount {
Workflows {
Runs
ExpRuns
Schedules
}
Agents {
Ns
Cluster
Total
}
Projects
Users
}
}
}
`;
export const GLOBAL_PROJECT_DATA = gql`
query getStats($query: UsageQuery!) {
UsageQuery(query: $query) {
TotalCount {
Projects
}
Projects {
ProjectId
Workflows {
Schedules
ExpRuns
Runs
}
Agents {
Total
Ns
Cluster
}
}
}
}
`;

View File

@ -0,0 +1,108 @@
import { gql } from '@apollo/client';
// listHubStatus
export const CORE_CHART_FIELDS = gql`
fragment CoreChartFields on Chart {
apiVersion
kind
metadata {
name
version
annotations {
categories
vendor
createdAt
repository
support
chartDescription
}
}
spec {
displayName
categoryDescription
keywords
maturity
experiments
maintainers {
name
email
}
minKubeVersion
provider {
name
}
links {
name
url
}
chaosExpCRDLink
platforms
chaosType
}
packageInfo {
packageName
experiments {
name
CSV
desc
}
}
}
`;
export const GET_CHARTS_DATA = gql`
${CORE_CHART_FIELDS}
query listCharts($hubName: String!, $projectID: String!) {
listCharts(hubName: $hubName, projectID: $projectID) {
...CoreChartFields
}
}
`;
export const GET_EXPERIMENT_DATA = gql`
${CORE_CHART_FIELDS}
query getHubExperiment($request: ExperimentRequest!) {
getHubExperiment(request: $request) {
...CoreChartFields
}
}
`;
export const GET_HUB_STATUS = gql`
query listHubStatus($projectID: String!) {
listHubStatus(projectID: $projectID) {
id
hubName
repoBranch
repoURL
totalExp
isAvailable
authType
isPrivate
token
userName
password
sshPrivateKey
sshPublicKey
lastSyncedAt
}
}
`;
export const GET_PREDEFINED_EXPERIMENT_YAML = gql`
query getPredefinedExperimentYAML($request: ExperimentRequest!) {
getPredefinedExperimentYAML(request: $request)
}
`;
export const GET_ENGINE_YAML = gql`
query getEngineData($request: ExperimentRequest!) {
getYAMLData(request: $request)
}
`;
export const GET_EXPERIMENT_YAML = gql`
query getYAMLData($request: ExperimentRequest!) {
getYAMLData(request: $request)
}
`;

View File

@ -0,0 +1,40 @@
import { gql } from '@apollo/client';
export const GET_CLUSTER = gql`
query listClusters($projectID: String!, $clusterType: String) {
listClusters(projectID: $projectID, clusterType: $clusterType) {
clusterID
clusterName
description
isActive
isRegistered
isClusterConfirmed
updatedAt
createdAt
clusterType
noOfSchedules
noOfWorkflows
token
lastWorkflowTimestamp
agentNamespace
agentScope
version
}
}
`;
export const GET_CLUSTER_LENGTH = gql`
query listClusters($projectID: String!) {
listClusters(projectID: $projectID) {
clusterID
}
}
`;
export const GET_CLUSTER_NAMES = gql`
query listClusters($projectID: String!) {
listClusters(projectID: $projectID) {
clusterName
}
}
`;

View File

@ -0,0 +1,18 @@
import { gql } from '@apollo/client';
// getGitOpsDetails
export const GET_GITOPS_DATA = gql`
query getGitOpsDetails($projectID: String!) {
getGitOpsDetails(projectID: $projectID) {
enabled
projectID
branch
repoURL
authType
token
userName
password
sshPrivateKey
}
}
`;

View File

@ -0,0 +1,32 @@
import { gql } from '@apollo/client';
// GetImageRegistry
export const LIST_IMAGE_REGISTRY_BY_PROJECT_ID = gql`
query listImageRegistry($data: String!) {
listImageRegistry(projectID: $data) {
imageRegistryInfo {
enableRegistry
isDefault
}
imageRegistryID
}
}
`;
// getImageRegistry
export const GET_IMAGE_REGISTRY = gql`
query getImageRegistry($imageRegistryID: String!, $projectID: String!) {
getImageRegistry(imageRegistryID: $imageRegistryID, projectID: $projectID) {
imageRegistryInfo {
isDefault
enableRegistry
secretName
secretNamespace
imageRegistryName
imageRepoName
imageRegistryType
}
imageRegistryID
}
}
`;

View File

@ -0,0 +1,8 @@
export * from './chaosHub';
export * from './cluster';
export * from './gitops';
export * from './imageRegistry';
export * from './manifest';
export * from './observability';
export * from './usage';
export * from './workflows';


@ -0,0 +1,26 @@
import { gql } from '@apollo/client';
// GetManifestTemplate
export const GET_MANIFEST_TEMPLATE = gql`
query listWorkflowManifests($projectID: String!) {
listWorkflowManifests(projectID: $projectID) {
templateID
manifest
projectName
templateDescription
templateName
isCustomWorkflow
}
}
`;
export const GET_TEMPLATE_BY_ID = gql`
query getWorkflowManifestByID($projectID: String!, $templateID: String!) {
getWorkflowManifestByID(templateID: $templateID, projectID: $projectID) {
templateID
templateName
templateDescription
manifest
}
}
`;
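As a hedged illustration of the new manifest-template API, the sketch below lazily loads a single template through getWorkflowManifestByID; the component name and import path are placeholders.

import React from 'react';
import { useLazyQuery } from '@apollo/client';
import { GET_TEMPLATE_BY_ID } from '../../graphql/queries'; // import path is illustrative

// Trimmed response shape for illustration
interface TemplateData {
  getWorkflowManifestByID: {
    templateID: string;
    templateName: string;
    manifest: string;
  };
}

const TemplateViewer: React.FC<{ projectID: string; templateID: string }> = ({
  projectID,
  templateID,
}) => {
  // useLazyQuery defers the request until the button is clicked
  const [loadTemplate, { data, loading }] = useLazyQuery<TemplateData>(
    GET_TEMPLATE_BY_ID,
    { variables: { projectID, templateID } }
  );
  return (
    <div>
      <button onClick={() => loadTemplate()} disabled={loading}>
        Load template
      </button>
      <pre>{data?.getWorkflowManifestByID.manifest}</pre>
    </div>
  );
};

export default TemplateViewer;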


@ -0,0 +1,202 @@
import { gql } from '@apollo/client';
// GetDataSource
export const GET_DATASOURCE = gql`
query listDataSource($projectID: String!) {
listDataSource(projectID: $projectID) {
dsID
dsName
dsType
dsURL
accessType
authType
basicAuthUsername
basicAuthPassword
scrapeInterval
queryTimeout
httpMethod
projectID
healthStatus
createdAt
updatedAt
}
}
`;
export const GET_DATASOURCE_OVERVIEW = gql`
query listDataSource($projectID: String!) {
listDataSource(projectID: $projectID) {
dsID
}
}
`;
// portalDashboardData
export const GET_PORTAL_DASHBOARDS = gql`
query listPortalDashboardData($projectID: String!, $hubName: String!) {
listPortalDashboardData(projectID: $projectID, hubName: $hubName) {
name
dashboardData
}
}
`;
// listDashboard
export const GET_DASHBOARD = gql`
query listDashboard($projectID: String!, $clusterID: String, $dbID: String) {
listDashboard(projectID: $projectID, clusterID: $clusterID, dbID: $dbID) {
dbID
dsID
dbName
clusterName
dsName
dsType
dsURL
dsHealthStatus
dbTypeID
dbTypeName
dbInformation
chaosEventQueryTemplate
chaosVerdictQueryTemplate
applicationMetadataMap {
namespace
applications {
kind
names
}
}
panelGroups {
panels {
panelID
createdAt
promQueries {
queryID
promQueryName
legend
resolution
minstep
line
closeArea
}
panelOptions {
points
grIDs
leftAxis
}
panelName
yAxisLeft
yAxisRight
xAxisDown
unit
}
panelGroupName
panelGroupID
}
endTime
startTime
refreshRate
projectID
clusterID
viewedAt
}
}
`;
export const GET_DASHBOARD_OVERVIEW = gql`
query listDashboard($projectID: String!, $clusterID: String, $dbID: String) {
listDashboard(projectID: $projectID, clusterID: $clusterID, dbID: $dbID) {
dbID
dbName
dbTypeID
dbTypeName
clusterName
clusterID
viewedAt
dbInformation
chaosEventQueryTemplate
chaosVerdictQueryTemplate
applicationMetadataMap {
namespace
applications {
kind
names
}
}
panelGroups {
panels {
panelID
createdAt
promQueries {
queryID
promQueryName
legend
resolution
minstep
line
closeArea
}
panelOptions {
points
grIDs
leftAxis
}
panelName
yAxisLeft
yAxisRight
xAxisDown
unit
}
panelGroupName
panelGroupID
}
}
}
`;
// getPromQuery
export const PROM_QUERY = gql`
query PrometheusQuery($request: PrometheusDataRequest!) {
getPrometheusData(request: $request) {
metricsResponse {
queryID
legends
tsvs {
date
value
}
}
annotationsResponse {
queryID
legends
tsvs {
date
value
}
}
}
}
`;
// getPromLabelNamesAndValues
export const PROM_LABEL_VALUES = gql`
query PrometheusLabelValues($request: PromSeriesInput) {
getPromLabelNamesAndValues(request: $request) {
series
labelValues {
label
values {
name
}
}
}
}
`;
// getPromSeriesList
export const PROM_SERIES_LIST = gql`
query getPromSeriesList($request: DsDetails) {
getPromSeriesList(request: $request) {
seriesList
}
}
`;


@ -0,0 +1,58 @@
import { gql } from '@apollo/client';
// usageQuery
export const GET_GLOBAL_STATS = gql`
query getGlobalStats($request: UsageDataRequest!) {
getUsageData(request: $request) {
totalCount {
projects
users
agents {
ns
cluster
total
}
workflows {
schedules
runs
expRuns
}
}
}
}
`;
// projectId -> projectID needs to be updated in backend
export const GLOBAL_PROJECT_DATA = gql`
query getStats($request: UsageDataRequest!) {
getUsageData(request: $request) {
totalCount {
projects
agents {
ns
total
cluster
active
}
workflows {
schedules
runs
expRuns
}
}
projects {
projectID
workflows {
schedules
runs
expRuns
}
agents {
ns
cluster
total
}
}
}
}
`;
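A hedged sketch of reading the page-level stats from the renamed getUsageData query. The exact fields of the UsageDataRequest input are defined by the backend schema and are not reproduced in this diff, so the request object is passed through untyped here.

import React from 'react';
import { useQuery } from '@apollo/client';
import { GET_GLOBAL_STATS } from '../../graphql/queries'; // import path is illustrative

// Trimmed response shape for illustration
interface GlobalStats {
  getUsageData: {
    totalCount: { projects: number; users: number };
  };
}

const UsageSummary: React.FC<{ request: Record<string, unknown> }> = ({
  request,
}) => {
  const { data } = useQuery<GlobalStats>(GET_GLOBAL_STATS, {
    // `request` must satisfy the backend's UsageDataRequest input type
    variables: { request },
    fetchPolicy: 'cache-and-network',
  });
  return (
    <p>
      {data?.getUsageData.totalCount.projects ?? 0} projects,{' '}
      {data?.getUsageData.totalCount.users ?? 0} users
    </p>
  );
};

export default UsageSummary;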


@ -0,0 +1,154 @@
import { gql } from '@apollo/client';
// GetWorkflowRuns
export const WORKFLOW_DETAILS_WITH_EXEC_DATA = gql`
query listWorkflowRuns($request: ListWorkflowRunsRequest!) {
listWorkflowRuns(request: $request) {
totalNoOfWorkflowRuns
workflowRuns {
workflowID
workflowName
workflowRunID
clusterName
lastUpdated
clusterID
phase
executionData
resiliencyScore
isRemoved
}
}
}
`;
export const WORKFLOW_DETAILS = gql`
query listWorkflowRuns($request: ListWorkflowRunsRequest!) {
listWorkflowRuns(request: $request) {
totalNoOfWorkflowRuns
workflowRuns {
workflowRunID
workflowID
clusterName
lastUpdated
projectID
clusterID
workflowName
clusterType
phase
resiliencyScore
experimentsPassed
experimentsFailed
experimentsAwaited
experimentsStopped
experimentsNa
totalExperiments
isRemoved
executedBy
}
}
}
`;
export const WORKFLOW_RUN_DETAILS = gql`
query listWorkflowRuns($request: ListWorkflowRunsRequest!) {
listWorkflowRuns(request: $request) {
totalNoOfWorkflowRuns
workflowRuns {
weightages {
experimentName
weightage
}
workflowID
workflowName
workflowRunID
clusterName
executionData
lastUpdated
phase
resiliencyScore
experimentsPassed
totalExperiments
isRemoved
}
}
}
`;
// getWorkflowStats
export const WORKFLOW_STATS = gql`
query listWorkflowStats(
$projectID: ID!
$filter: TimeFrequency!
$showWorkflowRuns: Boolean!
) {
listWorkflowStats(
projectID: $projectID
filter: $filter
showWorkflowRuns: $showWorkflowRuns
) {
date
value
}
}
`;
// ListWorkflow
export const GET_WORKFLOW_DETAILS = gql`
query listWorkflows($request: ListWorkflowsRequest!) {
listWorkflows(request: $request) {
totalNoOfWorkflows
workflows {
workflowID
workflowManifest
cronSyntax
clusterName
workflowName
workflowDescription
weightages {
experimentName
weightage
}
isCustomWorkflow
updatedAt
createdAt
projectID
clusterID
clusterType
isRemoved
lastUpdatedBy
}
}
}
`;
// getWorkflowRunStats
export const GET_WORKFLOW_RUNS_STATS = gql`
query getWorkflowRunStats(
$workflowRunStatsRequest: WorkflowRunStatsRequest!
) {
getWorkflowRunStats(workflowRunStatsRequest: $workflowRunStatsRequest) {
totalWorkflowRuns
succeededWorkflowRuns
failedWorkflowRuns
runningWorkflowRuns
workflowRunSucceededPercentage
workflowRunFailedPercentage
averageResiliencyScore
passedPercentage
failedPercentage
totalExperiments
experimentsPassed
experimentsFailed
experimentsAwaited
experimentsStopped
experimentsNa
}
}
`;
// getPredefinedWorkflowList
export const GET_PREDEFINED_WORKFLOW_LIST = gql`
query listPredefinedWorkflows($hubName: String!, $projectID: String!) {
listPredefinedWorkflows(hubName: $hubName, projectID: $projectID)
}
`;
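To show the renamed run-listing query in context, here is a minimal, hedged sketch of listWorkflowRuns; the request mirrors the WorkflowDataRequest model further below, and only a few response fields are typed.

import React from 'react';
import { useQuery } from '@apollo/client';
import { WORKFLOW_DETAILS } from '../../graphql/queries'; // import path is illustrative

// Trimmed response shape for illustration
interface WorkflowRunsData {
  listWorkflowRuns: {
    totalNoOfWorkflowRuns: number;
    workflowRuns: {
      workflowRunID: string;
      workflowName: string;
      phase: string;
      resiliencyScore: number;
    }[];
  };
}

const RecentRuns: React.FC<{ projectID: string }> = ({ projectID }) => {
  const { data } = useQuery<WorkflowRunsData>(WORKFLOW_DETAILS, {
    // Everything is wrapped in a single `request` object after the refactor
    variables: { request: { projectID } },
    fetchPolicy: 'cache-and-network',
  });
  return (
    <ul>
      {data?.listWorkflowRuns.workflowRuns.map((run) => (
        <li key={run.workflowRunID}>
          {run.workflowName}: {run.phase} (score {run.resiliencyScore})
        </li>
      ))}
    </ul>
  );
};

export default RecentRuns;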


@ -1,73 +1,73 @@
import { gql } from '@apollo/client';
export const WORKFLOW_EVENTS_WITH_EXEC_DATA = gql`
subscription workflowEvents($projectID: String!) {
workflowEventListener(project_id: $projectID) {
workflow_id
workflow_name
workflow_run_id
cluster_name
last_updated
cluster_id
subscription getWorkflowEvents($projectID: String!) {
getWorkflowEvents(projectID: $projectID) {
workflowID
workflowName
workflowRunID
clusterName
lastUpdated
clusterID
phase
execution_data
resiliency_score
executionData
resiliencyScore
}
}
`;
export const WORKFLOW_EVENTS = gql`
subscription workflowEvents($projectID: String!) {
workflowEventListener(project_id: $projectID) {
workflow_id
workflow_name
workflow_run_id
cluster_name
last_updated
subscription getWorkflowEvents($projectID: String!) {
getWorkflowEvents(projectID: $projectID) {
workflowID
workflowName
workflowRunID
clusterName
lastUpdated
phase
resiliency_score
experiments_passed
total_experiments
resiliencyScore
experimentsPassed
totalExperiments
}
}
`;
export const WORKFLOW_LOGS = gql`
subscription podLog($podDetails: PodLogRequest!) {
getPodLog(podDetails: $podDetails) {
subscription podLog($request: PodLogRequest!) {
getPodLog(request: $request) {
log
}
}
`;
export const KUBE_OBJ = gql`
subscription getKubeObject($data: KubeObjectRequest!) {
getKubeObject(kubeObjectRequest: $data) {
cluster_id
kube_obj
subscription getKubeObject($request: KubeObjectRequest!) {
getKubeObject(request: $request) {
clusterID
kubeObj
}
}
`;
export const VIEW_DASHBOARD = gql`
subscription viewDashboard(
$dbID: String
$prometheusQueries: [promQueryInput!]!
$queryMap: [queryMapForPanelGroup!]!
$dataVarMap: dataVars!
$dashboardID: String
$promQueries: [PromQueryInput!]!
$dashboardQueryMap: [QueryMapForPanelGroup!]!
$dataVariables: DataVars!
) {
viewDashboard(
dashboardID: $dbID
promQueries: $prometheusQueries
dashboardQueryMap: $queryMap
dataVariables: $dataVarMap
dashboardID: $dashboardID
promQueries: $promQueries
dashboardQueryMap: $dashboardQueryMap
dataVariables: $dataVariables
) {
dashboardMetricsResponse {
panelGroupID
panelGroupMetricsResponse {
panelID
PanelMetricsResponse {
queryid
panelMetricsResponse {
queryID
legends
tsvs {
date
@ -77,7 +77,7 @@ export const VIEW_DASHBOARD = gql`
}
}
annotationsResponse {
queryid
queryID
legends
tsvs {
date

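A hedged sketch of subscribing to the renamed getWorkflowEvents stream with Apollo's useSubscription; the component name and import path are illustrative.

import React from 'react';
import { useSubscription } from '@apollo/client';
import { WORKFLOW_EVENTS } from '../../graphql/subscriptions'; // import path is illustrative

// Trimmed event shape for illustration
interface WorkflowEventData {
  getWorkflowEvents: {
    workflowRunID: string;
    workflowName: string;
    phase: string;
  };
}

const LiveRunBanner: React.FC<{ projectID: string }> = ({ projectID }) => {
  // Each pushed event replaces `data` with the latest workflow run update
  const { data } = useSubscription<WorkflowEventData>(WORKFLOW_EVENTS, {
    variables: { projectID },
  });
  if (!data) return null;
  const event = data.getWorkflowEvents;
  return (
    <p>
      {event.workflowName} ({event.workflowRunID}): {event.phase}
    </p>
  );
};

export default LiveRunBanner;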

@ -1,14 +1,14 @@
import { BrushPostitionProps, GraphMetric } from 'litmus-ui';
import {
ApplicationMetadata,
ListDashboardResponse,
GetDashboardResponse,
Panel,
PanelGroup,
PanelGroupResponse,
PanelOption,
PanelResponse,
PromQuery,
updatePanelGroupInput,
UpdatePanelGroupRequest,
} from './graphql/dashboardsDetails';
import { promQueryInput } from './graphql/prometheus';
@ -74,7 +74,7 @@ export interface DashboardDetails {
agentID?: string;
information?: string;
panelGroups?: PanelGroupDetails[];
panelGroupMap?: updatePanelGroupInput[];
panelGroupMap?: UpdatePanelGroupRequest[];
selectedPanelGroupMap?: PanelGroupMap[];
applicationMetadataMap?: ApplicationMetadata[];
selectedPanels?: PanelDetails[];
@ -165,8 +165,8 @@ export interface SelectedDashboardInformation {
chaosEventQueryTemplate: string;
chaosVerdictQueryTemplate: string;
applicationMetadataMap: ApplicationMetadata[];
dashboardListForAgent: ListDashboardResponse[];
metaData: ListDashboardResponse | undefined;
dashboardListForAgent: GetDashboardResponse[];
metaData: GetDashboardResponse | undefined;
closedAreaQueryIDs: string[];
dashboardKey: string;
panelNameAndIDList: PanelNameAndID[];
@ -187,9 +187,9 @@ export interface PromQueryDetails extends PromQuery {
}
export interface PanelDetails extends Panel {
ds_url?: string;
panel_group_name?: string;
prom_queries: PromQueryDetails[];
dsURL?: string;
panelGroupName?: string;
promQueries: PromQueryDetails[];
}
export interface PanelGroupDetails extends PanelGroup {


@ -0,0 +1,44 @@
export interface SSHKey {
privateKey: string;
publicKey: string;
}
export interface SSHKeys {
generaterSSHKey: SSHKey;
}
export interface MyHubRequest {
id?: string;
hubName: string;
repoURL: string;
repoBranch: string;
isPrivate: Boolean;
authType: MyHubType;
token?: string;
userName?: string;
password?: string;
sshPrivateKey?: string;
sshPublicKey?: string;
projectID: string;
}
export interface MyHubData {
id: string;
repoURL: string;
repoBranch: string;
projectID: string;
hubName: string;
createdAt: string;
updatedAt: string;
}
export interface CreateMyHub {
request: MyHubRequest;
}
export enum MyHubType {
BASIC = 'BASIC',
TOKEN = 'TOKEN',
SSH = 'SSH',
NONE = 'NONE',
}
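For orientation, a hedged sketch of building the new camelCase MyHubRequest payload; every value below is a placeholder.

import { MyHubRequest, MyHubType } from './chaoshub'; // import path is illustrative

// All values below are placeholders
export const exampleHubRequest: MyHubRequest = {
  hubName: 'my-chaos-hub',
  repoURL: 'https://github.com/litmuschaos/chaos-charts',
  repoBranch: 'master',
  isPrivate: false,
  authType: MyHubType.NONE, // public hub, so no credentials are attached
  projectID: 'demo-project-id',
};
// Supplied to the add/update hub mutations as the `request` variable
// (see the MyHubConnectDrawer changes further below).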


@ -1,62 +1,52 @@
export interface Cluster {
cluster_id: string;
project_id: string;
cluster_name: string;
clusterID: string;
projectID: string;
clusterName: string;
description: string;
platform_name: string;
access_key: string;
is_registered: boolean;
is_cluster_confirmed: boolean;
is_active: boolean;
updated_at: string;
created_at: string;
cluster_type: string;
no_of_workflows: number;
no_of_schedules: number;
platformName: string;
accessKey: string;
isRegistered: boolean;
isClusterConfirmed: boolean;
isActive: boolean;
updatedAt: string;
createdAt: string;
clusterType: string;
noOfWorkflows: number;
noOfSchedules: number;
token: string;
agent_namespace: string;
serviceaccount: string;
agent_scope: string;
agent_ns_exists: boolean;
agent_sa_exists: boolean;
last_workflow_timestamp: string;
agentNamespace: string;
serviceAccount: string;
agentScope: string;
agentNSExists: boolean;
agentSAExists: boolean;
lastWorkflowTimestamp: string;
version: string;
}
export interface Clusters {
getCluster: Cluster[];
listClusters: Cluster[];
}
export interface CreateClusterInput {
ClusterInput: {
cluster_name: string;
export interface CreateClusterRequest {
request: {
clusterName: string;
description: string;
platform_name: string;
project_id: string;
cluster_type: string;
agent_namespace: string;
serviceaccount: string;
agent_scope: string;
agent_ns_exists: boolean;
agent_sa_exists: boolean;
platformName: string;
projectID: string;
clusterType: string;
agentNamespace: string;
serviceAccount: string;
agentScope: string;
agentNSExists: boolean;
agentSAExists: boolean;
};
}
export interface clusterRegResponse {
token: string;
cluster_id: string;
cluster_name: string;
}
export interface CreateClusterInputResponse {
userClusterReg: clusterRegResponse;
}
export interface ClusterVars {
project_id: string;
export interface ClusterRequest {
projectID: string;
}
export interface DeleteClusters {
projectID: string;
cluster_ids: string;
clusterIDs: string;
}


@ -1,32 +1,33 @@
export interface WeightMap {
experiment_name: string;
experimentName: string;
weightage: number;
}
export interface CreateWorkFlowInput {
ChaosWorkFlowInput: {
workflow_id?: string;
workflow_manifest: string;
export interface CreateWorkFlowRequest {
request: {
workflowID?: string;
workflowManifest: string;
cronSyntax: string;
workflow_name: string;
workflow_description: string;
workflowName: string;
workflowDescription: string;
isCustomWorkflow: boolean;
weightages: WeightMap[];
project_id: string;
cluster_id: string;
projectID: string;
clusterID: string;
};
}
export interface UpdateWorkflowResponse {
workflow_id: string;
workflow_name: string;
workflow_description: string;
workflowID: string;
workflowName: string;
workflowDescription: string;
isCustomWorkflow: string;
cronSyntax: string;
}
export interface CreateWorkflowResponse {
cluster_id: string;
is_active: boolean;
clusterID: string;
isActive: boolean;
}
export interface GVRRequest {
@ -36,28 +37,28 @@ export interface GVRRequest {
}
export interface KubeObjRequest {
data: {
cluster_id: string;
object_type: string;
kube_obj_request: GVRRequest;
request: {
clusterID: string;
objectType: string;
kubeObjRequest: GVRRequest;
};
}
export interface KubeObjResponse {
getKubeObject: {
cluster_id: string;
kube_obj: string;
clusterID: string;
kubeObj: string;
};
}
export interface KubeObjResource {
api_version: string;
apiVersion: string;
containers: object;
creation_timestamp: string;
creationTimestamp: string;
labels: string;
name: string;
namespace: string;
termination_grace_periods: string;
terminationGracePeriods: string;
uid: string;
volumes: object;
}
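A hedged sketch of assembling the new CreateWorkFlowRequest shape (placeholders throughout); the nested `request` object replaces the old ChaosWorkFlowInput wrapper.

import { CreateWorkFlowRequest } from './createWorkflowData'; // import path is illustrative

const projectID = 'demo-project-id'; // placeholder
const clusterID = 'demo-cluster-id'; // placeholder
const manifest = '...'; // serialized Argo workflow YAML, placeholder

export const createWorkflowVars: CreateWorkFlowRequest = {
  request: {
    workflowManifest: manifest,
    cronSyntax: '0 0 * * *', // cron schedule string
    workflowName: 'pod-delete-workflow',
    workflowDescription: 'illustrative schedule',
    isCustomWorkflow: false,
    weightages: [{ experimentName: 'pod-delete', weightage: 10 }],
    projectID,
    clusterID,
  },
};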


@ -1,54 +1,54 @@
export interface PanelOption {
points: boolean;
grids: boolean;
left_axis: boolean;
grIDs: boolean;
leftAxis: boolean;
}
export interface PromQuery {
queryid: string;
prom_query_name: string;
queryID: string;
promQueryName: string;
legend: string;
resolution: string;
minstep: string;
line: boolean;
close_area: boolean;
closeArea: boolean;
}
export interface Panel {
panel_id?: string;
created_at?: string;
panel_group_id?: string;
prom_queries: PromQuery[];
panel_options: PanelOption;
panel_name: string;
y_axis_left: string;
y_axis_right: string;
x_axis_down: string;
panelID?: string;
createdAt?: string;
panelGroupID?: string;
promQueries: PromQuery[];
panelOptions: PanelOption;
panelName: string;
yAxisLeft: string;
yAxisRight: string;
xAxisDown: string;
unit: string;
}
export interface PanelGroup {
panel_group_id?: string;
panel_group_name: string;
panelGroupID?: string;
panelGroupName: string;
panels: Panel[];
}
export interface PanelResponse {
panel_id: string;
created_at: string;
prom_queries: PromQuery[];
panel_options: PanelOption;
panel_name: string;
y_axis_left: string;
y_axis_right: string;
x_axis_down: string;
panelID: string;
createdAt: string;
promQueries: PromQuery[];
panelOptions: PanelOption;
panelName: string;
yAxisLeft: string;
yAxisRight: string;
xAxisDown: string;
unit: string;
}
export interface PanelGroupResponse {
panels: PanelResponse[];
panel_group_name: string;
panel_group_id: string;
panelGroupName: string;
panelGroupID: string;
}
export interface Resource {
@ -61,73 +61,73 @@ export interface ApplicationMetadata {
applications: Resource[];
}
export interface CreateDashboardInput {
createDBInput: {
ds_id: string;
db_name: string;
db_type_id: string;
db_type_name: string;
db_information: string;
chaos_event_query_template: string;
chaos_verdict_query_template: string;
application_metadata_map: ApplicationMetadata[];
panel_groups: PanelGroup[];
end_time: string;
start_time: string;
project_id: string;
cluster_id: string;
refresh_rate: string;
export interface CreateDashboardRequest {
request: {
dsID: string;
dbName: string;
dbTypeID: string;
dbTypeName: string;
dbInformation: string;
chaosEventQueryTemplate: string;
chaosVerdictQueryTemplate: string;
applicationMetadataMap: ApplicationMetadata[];
panelGroups: PanelGroup[];
endTime: string;
startTime: string;
projectID: string;
clusterID: string;
refreshRate: string;
};
createDashBoard?: ListDashboardResponse;
createDashBoard?: GetDashboardResponse;
}
export interface updatePanelGroupInput {
panel_group_name: string;
panel_group_id: string;
export interface UpdatePanelGroupRequest {
panelGroupID: string;
panelGroupName: string;
panels: Panel[];
}
export interface UpdateDashboardInput {
updateDBInput: {
db_id: string;
ds_id?: string;
db_name?: string;
db_type_id?: string;
db_type_name?: string;
db_information?: string;
chaos_event_query_template?: string;
chaos_verdict_query_template?: string;
application_metadata_map?: ApplicationMetadata[];
end_time?: string;
start_time?: string;
cluster_id?: string;
refresh_rate?: string;
panel_groups?: updatePanelGroupInput[];
export interface UpdateDashboardRequest {
request: {
dbID: string;
dsID?: string;
dbName?: string;
dbTypeID?: string;
dbTypeName?: string;
dbInformation?: string;
chaosEventQueryTemplate?: string;
chaosVerdictQueryTemplate?: string;
applicationMetadataMap?: ApplicationMetadata[];
endTime?: string;
startTime?: string;
clusterID?: string;
refreshRate?: string;
panelGroups?: UpdatePanelGroupRequest[];
};
chaosQueryUpdate: boolean;
}
export interface DeleteDashboardInput {
export interface DeleteDashboardRequest {
projectID: string;
dbID: string;
}
export interface UpdatePanelInput {
panelInput: Panel[];
export interface UpdatePanelRequest {
request: Panel[];
}
export interface PortalDashboardsVars {
export interface PortalDashboardsRequest {
projectID: string;
hubName: string;
}
export interface PortalDashboardsResponse {
name: string;
dashboard_data: string;
dashboardData: string;
}
export interface PortalDashboardList {
PortalDashboardData: PortalDashboardsResponse[];
export interface GetPortalDashboard {
listPortalDashboardData: PortalDashboardsResponse[];
}
export interface ResourceResponse {
@ -140,39 +140,39 @@ export interface ApplicationMetadataResponse {
applications: ResourceResponse[];
}
export interface ListDashboardResponse {
db_id: string;
ds_id: string;
db_name: string;
db_type: string;
cluster_name: string;
ds_name: string;
ds_type: string;
ds_url: string;
ds_health_status: string;
db_type_id: string;
db_type_name: string;
db_information: string;
chaos_event_query_template: string;
chaos_verdict_query_template: string;
application_metadata_map: ApplicationMetadataResponse[];
panel_groups: PanelGroupResponse[];
end_time: string;
start_time: string;
refresh_rate: string;
project_id: string;
cluster_id: string;
created_at: string;
updated_at: string;
viewed_at: string;
export interface GetDashboardResponse {
dbID: string;
dsID: string;
dbName: string;
dbType: string;
clusterName: string;
dsName: string;
dsType: string;
dsURL: string;
dsHealthStatus: string;
dbTypeID: string;
dbTypeName: string;
dbInformation: string;
chaosEventQueryTemplate: string;
chaosVerdictQueryTemplate: string;
applicationMetadataMap: ApplicationMetadataResponse[];
panelGroups: PanelGroupResponse[];
endTime: string;
startTime: string;
refreshRate: string;
projectID: string;
clusterID: string;
createdAt: string;
updatedAt: string;
viewedAt: string;
}
export interface ListDashboardVars {
export interface GetDashboardRequest {
projectID: string;
clusterID?: string;
dbID?: string;
}
export interface DashboardList {
ListDashboard: ListDashboardResponse[];
export interface GetDashboard {
listDashboard: GetDashboardResponse[];
}

View File

@ -1,41 +1,41 @@
export interface CreateDataSourceInput {
DSInput: {
ds_id?: string;
ds_name: string;
ds_type: string;
ds_url: string;
access_type: string;
auth_type: string;
basic_auth_username?: string;
basic_auth_password?: string;
scrape_interval: number;
query_timeout: number;
http_method: string;
project_id?: string;
dsID?: string;
dsName: string;
dsType: string;
dsURL: string;
accessType: string;
authType: string;
basicAuthUsername?: string;
basicAuthPassword?: string;
scrapeInterval: number;
queryTimeout: number;
httpMethod: string;
projectID?: string;
};
}
export interface ListDataSourceResponse {
ds_id: string;
ds_name: string;
ds_type: string;
ds_url: string;
access_type: string;
auth_type: string;
basic_auth_username: string;
basic_auth_password: string;
scrape_interval: number;
query_timeout: number;
http_method: string;
project_id: string;
created_at: string;
updated_at: string;
health_status: string;
dsID: string;
dsName: string;
dsType: string;
dsURL: string;
accessType: string;
authType: string;
basicAuthUsername: string;
basicAuthPassword: string;
scrapeInterval: number;
queryTimeout: number;
httpMethod: string;
projectID: string;
createdAt: string;
updatedAt: string;
healthStatus: string;
}
export interface deleteDSInput {
force_delete: boolean;
ds_id: string;
forceDelete: boolean;
dsID: string;
}
export interface DeleteDataSourceInput {
@ -47,5 +47,5 @@ export interface ListDataSourceVars {
}
export interface DataSourceList {
ListDataSource: ListDataSourceResponse[];
listDataSource: ListDataSourceResponse[];
}


@ -1,15 +1,15 @@
export interface GitOpsData {
Enabled: boolean;
ProjectID: string | null;
Branch: string | null;
RepoURL: string | null;
AuthType: string | null;
Token: string | null;
UserName: string | null;
Password: string | null;
SSHPrivateKey: string | null;
enabled: boolean;
projectID: string | null;
branch: string | null;
repoURL: string | null;
authType: string | null;
token: string | null;
userName: string | null;
password: string | null;
sshPrivateKey: string | null;
}
export interface GitOpsDetail {
export interface GetGitOpsDetailRequest {
getGitOpsDetails: GitOpsData;
}


@ -0,0 +1,35 @@
export interface ImageRegistryInfo {
isDefault: boolean;
imageRegistryName: string;
imageRepoName: string;
imageRegistryType: string;
secretName: string;
secretNamespace: string;
enableRegistry: boolean;
}
export interface ImageRegistry {
isDefualt: boolean;
imageRegistryInfo: ImageRegistryInfo;
imageRegistryID: string;
projectID: string;
updatedAt: string;
createdAt: string;
isRemoved: boolean;
}
export interface CreateImageRegistryResponse {
createImageRegistry: ImageRegistry;
}
export interface GetImageRegistryResponse {
getImageRegistry: ImageRegistry;
}
export interface ListImageRegistryResponse {
listImageRegistry: ImageRegistry[];
}
export interface UpdateImageRegistryResponse {
updateImageRegistry: ImageRegistry;
}


@ -1,31 +1,16 @@
export interface MemberInvitation {
member: {
project_id: string;
user_id: string;
};
}
export interface MemberInviteNew {
member: {
project_id: string;
user_id: string;
role: string;
};
}
export interface UserInvite {
_id: string;
name: string;
username: string;
email: string;
created_at: string;
deactivated_at: string;
createdAt: string;
deactivatedAt: string;
}
// Invitation status for users
export enum InvitationStatus {
accepted = 'Accepted',
pending = 'Pending',
declined = 'Declined',
exited = 'Exited',
ACCEPTED = 'ACCEPTED',
PENDING = 'PENDING',
DECLINED = 'DECLINED',
EXITED = 'EXITED',
}


@ -1,23 +1,21 @@
export interface PodLogRequest {
cluster_id: string;
workflow_run_id: string;
pod_name: string;
pod_namespace: string;
pod_type: string;
exp_pod?: string;
runner_pod?: string;
chaos_namespace?: string;
}
export interface PodLogResponse {
workflow_run_id: string;
pod_name: string;
pod_type: string;
workflowRunID: string;
podName: string;
podType: string;
log: string;
}
export interface PodLogVars {
podDetails: PodLogRequest;
export interface PodLogRequest {
request: {
clusterID: string;
workflowRunID: string;
podName: string;
podNamespace: string;
podType: string;
expPod?: string;
runnerPod?: string;
chaosNamespace?: string;
};
}
export interface PodLog {


@ -1,5 +1,5 @@
export interface promQueryInput {
queryid: string;
queryID: string;
query: string;
legend?: string;
resolution?: string;
@ -7,7 +7,7 @@ export interface promQueryInput {
}
export interface promInput {
ds_details: dsDetails;
dsDetails: dsDetails;
queries?: promQueryInput[];
}
@ -46,16 +46,16 @@ export interface promResponse {
}
export interface PrometheusQueryVars {
prometheusInput: promInput;
request: promInput;
}
export interface PrometheusResponse {
GetPromQuery: promResponse;
getPrometheusData: promResponse;
}
export interface promSeriesInput {
series: string;
ds_details: dsDetails;
dsDetails: dsDetails;
}
export interface Option {
@ -71,11 +71,11 @@ export interface promSeriesResponse {
}
export interface PrometheusSeriesQueryVars {
prometheusInput: promSeriesInput;
request: promSeriesInput;
}
export interface PrometheusSeriesResponse {
GetPromLabelNamesAndValues: promSeriesResponse;
getPromLabelNamesAndValues: promSeriesResponse;
}
export interface dsDetails {
@ -89,19 +89,19 @@ export interface promSeriesListResponse {
}
export interface PrometheusSeriesListQueryVars {
prometheusDSInput: dsDetails;
request: dsDetails;
}
export interface PrometheusSeriesListResponse {
GetPromSeriesList: promSeriesListResponse;
getPromSeriesList: promSeriesListResponse;
}
export interface dataVars {
url: string;
start: string;
end: string;
relative_time: number;
refresh_interval: number;
relativeTime: number;
refreshInterval: number;
}
export interface queryMapForPanel {
@ -115,10 +115,10 @@ export interface queryMapForPanelGroup {
}
export interface ViewDashboardInput {
dbID?: string;
prometheusQueries: promQueryInput[];
queryMap: queryMapForPanelGroup[];
dataVarMap: dataVars;
dashboardID?: string;
promQueries: promQueryInput[];
dashboardQueryMap: queryMapForPanelGroup[];
dataVariables: dataVars;
}
export interface metricDataForPanel {


@ -1,26 +1,26 @@
export interface Weights {
experiment_name: string;
experimentName: string;
weightage: number;
}
export interface ScheduleWorkflow {
cluster_id: string;
created_at: string;
clusterID: string;
createdAt: string;
cronSyntax: string;
isCustomWorkflow: string;
project_id: string;
updated_at: string;
projectID: string;
updatedAt: string;
weightages: Weights[];
workflow_description: string;
workflow_id: string;
workflow_manifest: string;
workflow_name: string;
cluster_name: string;
cluster_type: string;
workflowDescription: string;
workflowID: string;
workflowManifest: string;
workflowName: string;
clusterName: string;
clusterType: string;
regularity?: string;
isRemoved: boolean;
last_updated_by: string;
lastUpdatedBy: string;
}
export interface DeleteSchedule {
workflow_id: string;
workflowID: string;
}


@ -1,24 +1,38 @@
export interface ProjectData {
ProjectId: string;
Workflows: {
Schedules: number;
ExpRuns: number;
Runs: number;
projectID: string;
workflows: {
schedules: number;
expRuns: number;
runs: number;
};
Agents: {
Total: number;
Ns: number;
Cluster: number;
agents: {
total: number;
ns: number;
cluster: number;
};
}
export interface AgentStat {
ns: number;
cluster: number;
total: number;
active: number;
}
export interface WorkflowStat {
schedules: number;
runs: number;
expRuns: number;
}
export interface UsageData {
TotalCount: {
Projects: number;
totalCount: {
projects: number;
agents: AgentStat;
workflows: WorkflowStat;
};
Projects: ProjectData[];
projects: ProjectData[];
}
export interface UsageStats {
UsageQuery: UsageData;
export interface UsageStatsResponse {
getUsageData: UsageData;
}


@ -21,7 +21,7 @@ export interface Project {
}
export interface Owner {
UserId: string;
UserID: string;
Username: string;
}
@ -32,37 +32,29 @@ export interface MemberData {
export interface ProjectStats {
Name: string;
ProjectId: string;
ProjectID: string;
Members: MemberData;
}
export interface UserDetails {
username: string;
projects: Project[];
name: string;
email: string;
id: string;
company_name: string;
updated_at: string;
created_at: string;
removed_at: string;
is_email_verified: string;
role: string;
Username: string;
Projects: Project[];
Name: string;
Email: string;
Id: string;
CompanyName: string;
UpdatedAt: string;
CreatedAt: string;
RemovedAt: string;
IsEmailVerified: string;
Role: string;
}
export interface MyHubDetail {
id: string;
HubName: string;
RepoBranch: string;
RepoURL: string;
}
export interface CurrentUserDetails {
getUser: UserDetails;
}
export interface CurrentUserDedtailsVars {
username: string;
hubName: string;
repoBranch: string;
repoURL: string;
}
export interface CreateUserData {
@ -86,83 +78,29 @@ export interface UserData {
username: string;
email: string;
name: string;
logged_in: boolean;
created_at: string;
updated_at: string;
deactivated_at: string;
}
export interface UpdateUserStateInput {
uid: string;
isDeactivate: boolean;
}
export interface SSHKey {
privateKey: string;
publicKey: string;
}
export interface SSHKeys {
generaterSSHKey: SSHKey;
}
export interface MyHubInput {
id?: string;
HubName: string;
RepoURL: string;
RepoBranch: string;
IsPrivate: Boolean;
AuthType: MyHubType;
Token?: string;
UserName?: string;
Password?: string;
SSHPrivateKey?: string;
SSHPublicKey?: string;
}
export interface MyHubData {
id: string;
RepoURL: string;
RepoBranch: string;
ProjectID: string;
HubName: string;
CreatedAt: string;
UpdatedAt: string;
}
export interface CreateMyHub {
MyHubDetails: MyHubInput;
projectID: string;
}
export enum MyHubType {
basic = 'basic',
token = 'token',
ssh = 'ssh',
none = 'none',
loggedIn: boolean;
createdAt: string;
updatedAt: string;
deactivatedAt: string;
}
export interface Projects {
listProjects: Project[];
getProjects: Project[];
}
export interface ProjectDetail {
getProject: Project;
}
export interface ProjectDetailVars {
projectID: string;
}
export enum Role {
viewer = 'Viewer',
editor = 'Editor',
owner = 'Owner',
VIEWER = 'Viewer',
EDITOR = 'Editor',
OWNER = 'Owner',
}
export enum UserRole {
admin = 'admin',
user = 'user',
ADMIN = 'admin',
USER = 'user',
}
export enum InvitationStatus {
@ -171,7 +109,7 @@ export enum InvitationStatus {
}
export enum UserStatus {
DELETED = 'deleted',
ACTIVE = 'active',
INACTIVE = 'inactive',
DELETED = 'DELETED',
ACTIVE = 'ACTIVE',
INACTIVE = 'INACTIVE',
}


@ -32,10 +32,10 @@ export interface Nodes {
}
export interface ExecutionData {
resiliency_score?: number;
experiments_passed?: number;
total_experiments?: number;
event_type: string;
resiliencyScore?: number;
experimentsPassed?: number;
totalExperiments?: number;
eventType: string;
uid: string;
namespace: string;
name: string;
@ -47,47 +47,47 @@ export interface ExecutionData {
}
export interface WeightageMap {
experiment_name: string;
experimentName: string;
weightage: number;
}
export interface WorkflowRun {
workflow_run_id: string;
workflow_id: string;
cluster_name: string;
workflowRunID: string;
workflowID: string;
clusterName: string;
weightages: WeightageMap[];
last_updated: string;
project_id: string;
cluster_id: string;
workflow_name: string;
cluster_type: String;
lastUpdated: string;
projectID: string;
clusterID: string;
workflowName: string;
clusterType: String;
phase: string;
resiliency_score: number;
experiments_passed: number;
experiments_failed: number;
experiments_awaited: number;
experiments_stopped: number;
experiments_na: number;
total_experiments: number;
execution_data: string;
executed_by: string;
resiliencyScore: number;
experimentsPassed: number;
experimentsFailed: number;
experimentsAwaited: number;
experimentsStopped: number;
experimentsNa: number;
totalExperiments: number;
executionData: string;
executedBy: string;
isRemoved: boolean;
}
interface GetWorkflowRunsOutput {
total_no_of_workflow_runs: number;
workflow_runs: WorkflowRun[];
interface GetWorkflowRunsResponse {
totalNoOfWorkflowRuns: number;
workflowRuns: WorkflowRun[];
}
export interface Workflow {
getWorkflowRuns: GetWorkflowRunsOutput;
listWorkflowRuns: GetWorkflowRunsResponse;
}
export interface WorkflowSubscription {
workflowEventListener: WorkflowRun;
getWorkflowEvents: WorkflowRun;
}
export interface WorkflowSubscriptionInput {
export interface WorkflowSubscriptionRequest {
projectID: string;
}
@ -98,15 +98,15 @@ export interface Pagination {
}
// Sort
export interface SortInput {
field: 'Name' | 'Time';
export interface SortRequest {
field: 'NAME' | 'TIME';
descending?: boolean;
}
// Filter
interface DateRange {
start_date: string;
end_date?: string;
startDate: string;
endDate?: string;
}
export type WorkflowStatus =
@ -117,34 +117,34 @@ export type WorkflowStatus =
| 'Terminated'
| undefined;
export interface WorkflowRunFilterInput {
workflow_name?: string;
cluster_name?: string;
workflow_status?: WorkflowStatus;
date_range?: DateRange;
export interface WorkflowRunFilterRequest {
workflowName?: string;
clusterName?: string;
workflowStatus?: WorkflowStatus;
dateRange?: DateRange;
isRemoved?: boolean | null;
}
export interface WorkflowDataVars {
workflowRunsInput: {
project_id: string;
workflow_run_ids?: string[];
workflow_ids?: string[];
export interface WorkflowDataRequest {
request: {
projectID: string;
workflowRunIDs?: string[];
workflowIDs?: string[];
pagination?: Pagination;
sort?: SortInput;
filter?: WorkflowRunFilterInput;
sort?: SortRequest;
filter?: WorkflowRunFilterRequest;
};
}
export interface HeatmapDataVars {
project_id: string;
workflow_id: string;
export interface HeatmapDataRequest {
projectID: string;
workflowID: string;
year: number;
}
export interface WorkflowRunDetails {
no_of_runs: number;
date_stamp: number;
noOfRuns: number;
dateStamp: number;
}
export interface HeatMapData {
value: number;
@ -152,32 +152,32 @@ export interface HeatMapData {
}
export interface HeatmapDataResponse {
getHeatmapData: WeekData[];
listHeatmapData: WeekData[];
}
export interface WorkflowRunStatsResponse {
getWorkflowRunStats: {
total_workflow_runs: number;
succeeded_workflow_runs: number;
failed_workflow_runs: number;
running_workflow_runs: number;
workflow_run_succeeded_percentage: number;
workflow_run_failed_percentage: number;
average_resiliency_score: number;
passed_percentage: number;
failed_percentage: number;
total_experiments: number;
experiments_passed: number;
experiments_failed: number;
experiments_awaited: number;
experiments_stopped: number;
experiments_na: number;
totalWorkflowRuns: number;
succeededWorkflowRuns: number;
failedWorkflowRuns: number;
runningWorkflowRuns: number;
workflowRunSucceededPercentage: number;
workflowRunFailedPercentage: number;
averageResiliencyScore: number;
passedPercentage: number;
failedPercentage: number;
totalExperiments: number;
experimentsPassed: number;
experimentsFailed: number;
experimentsAwaited: number;
experimentsStopped: number;
experimentsNa: number;
};
}
export interface WorkflowRunStatsRequest {
workflowRunStatsRequest: {
project_id: string;
workflow_ids?: string[];
projectID: string;
workflowIDs?: string[];
};
}
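To make the new stats contract concrete, here is a hedged sketch that pairs GET_WORKFLOW_RUNS_STATS with the request/response interfaces above; import paths and the component name are illustrative.

import React from 'react';
import { useQuery } from '@apollo/client';
import { GET_WORKFLOW_RUNS_STATS } from '../../graphql/queries'; // import path is illustrative
import {
  WorkflowRunStatsRequest,
  WorkflowRunStatsResponse,
} from './workflowData';

const ResiliencyScoreCard: React.FC<{ projectID: string }> = ({ projectID }) => {
  const { data } = useQuery<WorkflowRunStatsResponse, WorkflowRunStatsRequest>(
    GET_WORKFLOW_RUNS_STATS,
    { variables: { workflowRunStatsRequest: { projectID } } }
  );
  return (
    <p>
      Average resiliency score:{' '}
      {data?.getWorkflowRunStats.averageResiliencyScore ?? 0}
    </p>
  );
};

export default ResiliencyScoreCard;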


@ -1,5 +1,5 @@
export interface WeightageMap {
experiment_name: string;
experimentName: string;
weightage: number;
}
@ -33,8 +33,8 @@ export interface Nodes {
}
export interface ExecutionData {
resiliency_score?: number;
event_type: string;
resiliencyScore?: number;
eventType: string;
uid: string;
namespace: string;
name: string;
@ -46,44 +46,44 @@ export interface ExecutionData {
}
export interface WorkflowRun {
execution_data: string;
last_updated: string;
workflow_run_id: string;
executionData: string;
lastUpdated: string;
workflowRunID: string;
}
export interface ScheduledWorkflow {
workflow_id: string;
workflow_manifest: string;
workflowID: string;
workflowManifest: string;
cronSyntax: string;
cluster_name: string;
workflow_name: string;
workflow_description: string;
clusterName: string;
workflowName: string;
workflowDescription: string;
weightages: WeightageMap[];
isCustomWorkflow: string;
updated_at: string;
created_at: string;
project_id: string;
cluster_id: string;
cluster_type: string;
updatedAt: string;
createdAt: string;
projectID: string;
clusterID: string;
clusterType: string;
isRemoved: Boolean;
last_updated_by: string;
lastUpdatedBy: string;
}
export interface WorkflowList {
ListWorkflow: ScheduledWorkflow[];
export interface GetWorkflow {
getWorkflow: ScheduledWorkflow[];
}
export interface WorkflowListDataVars {
export interface GetWorkflowDataRequest {
projectID: string;
workflowIDs: string[];
}
export interface ListManifestTemplateArray {
template_id: string;
export interface GetManifestTemplateArray {
templateID: string;
manifest: string;
project_name: string;
template_description: string;
template_name: string;
projectName: string;
templateDescription: string;
templateName: string;
isCustomWorkflow: boolean;
}
@ -92,34 +92,35 @@ export interface Pagination {
limit: number;
}
export interface ListManifestTemplate {
ListManifestTemplate: ListManifestTemplateArray[];
export interface GetManifestTemplate {
listWorkflowManifests: GetManifestTemplateArray[];
}
export interface SortInput {
field: 'Name' | 'Time';
export interface SortRequest {
field: 'NAME' | 'TIME';
descending?: Boolean;
}
export interface WorkflowFilterInput {
workflow_name?: string;
cluster_name?: string;
export interface WorkflowFilterRequest {
workflowName?: string;
clusterName?: string;
}
export interface ListWorkflowsInput {
workflowInput: {
project_id: string;
workflow_ids?: string[];
export interface GetWorkflowsRequest {
request: {
projectID: string;
workflowIDs?: string[];
pagination?: Pagination;
sort?: SortInput;
filter?: WorkflowFilterInput;
sort?: SortRequest;
filter?: WorkflowFilterRequest;
};
}
export interface ListWorkflowsOutput {
total_no_of_workflows: number;
export interface GetWorkflowsResponse {
totalNoOfWorkflows: number;
workflows: ScheduledWorkflow[];
}
export interface ScheduledWorkflows {
ListWorkflow: ListWorkflowsOutput;
listWorkflows: GetWorkflowsResponse;
}


@ -4,17 +4,17 @@ export type DateValue = {
};
export enum Filter {
Monthly = 'Monthly',
Daily = 'Daily',
Hourly = 'Hourly',
MONTHLY = 'MONTHLY',
DAILY = 'DAILY',
HOURLY = 'HOURLY',
}
export interface WorkflowStatsResponse {
getWorkflowStats: Array<DateValue>;
listWorkflowStats: Array<DateValue>;
}
export interface WorkflowStatsVars {
filter: Filter;
project_id: string;
show_workflow_runs: boolean;
projectID: string;
showWorkflowRuns: boolean;
}
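A hedged sketch tying the uppercased Filter enum to the renamed listWorkflowStats query; import paths and the component are illustrative.

import React from 'react';
import { useQuery } from '@apollo/client';
import { WORKFLOW_STATS } from '../../graphql/queries'; // import path is illustrative
import {
  Filter,
  WorkflowStatsResponse,
  WorkflowStatsVars,
} from './workflowStats';

const MonthlyRunCount: React.FC<{ projectID: string }> = ({ projectID }) => {
  const { data } = useQuery<WorkflowStatsResponse, WorkflowStatsVars>(
    WORKFLOW_STATS,
    {
      variables: {
        projectID,
        filter: Filter.MONTHLY,
        showWorkflowRuns: true, // count runs rather than schedules
      },
    }
  );
  // Each returned point carries the date bucket and the number of runs in it
  return <p>{data?.listWorkflowStats.length ?? 0} data points</p>;
};

export default MonthlyRunCount;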


@ -18,7 +18,7 @@ interface Panel {
panel_name: string;
panel_options: {
points: boolean;
grids: boolean;
grIDs: boolean;
left_axis: boolean;
};
y_axis_left: string;


@ -1,73 +1,75 @@
import { MyHubType } from '../graphql/user';
import { MyHubType } from '../graphql/chaoshub';
export interface Chart {
ApiVersion: string;
Kind: string;
Metadata: Metadata;
Spec: Spec;
PackageInfo: PackageInfo;
apiVersion: string;
kind: string;
metadata: Metadata;
spec: Spec;
packageInfo: PackageInfo;
}
export interface Spec {
DisplayName: string;
CategoryDescription: string;
Keywords: string[];
Maturity: string;
Maintainers: Maintainer[];
MinKubeVersion: string;
Provider: string;
Links: Link[];
Experiments: string[];
ChaosExpCRDLink: string;
Platforms: string[];
ChaosType: string;
displayName: string;
categoryDescription: string;
keywords: string[];
maturity: string;
maintainers: Maintainer[];
minKubeVersion: string;
provider: {
name: string;
};
links: Link[];
experiments: string[];
chaosExpCRDLink: string;
platforms: string[];
chaosType: string;
}
export interface Maintainer {
Name: string;
Email: string;
name: string;
email: string;
}
export interface Link {
Name: string;
Url: string;
name: string;
url: string;
}
export interface Metadata {
Name: string;
Version: string;
Annotations: Annotation[];
name: string;
version: string;
annotations: Annotation[];
}
export interface Annotation {
Categories: string;
Vendor: string;
CreatedAt: string;
Repository: string;
Support: string;
ChartDescription: string;
categories: string;
vendor: string;
createdAt: string;
repository: string;
support: string;
chartDescription: string;
}
export interface PackageInfo {
PackageName: string;
Experiments: Experiments[];
packageName: string;
experiments: Experiments[];
}
export interface Experiments {
Name: string;
Csv: string;
Desc: string;
name: string;
CSV: string;
desc: string;
}
export interface ChartsInput {
HubName: string;
UserName: string;
RepoURL: string;
RepoBranch: string;
hubName: string;
userName: string;
repoURL: string;
repoBranch: string;
}
export interface Charts {
getCharts: Chart[];
listCharts: Chart[];
}
export interface ExperimentDetail {
@ -76,23 +78,23 @@ export interface ExperimentDetail {
export interface HubDetails {
id: string;
HubName: string;
RepoURL: string;
RepoBranch: string;
TotalExp: string;
IsAvailable: boolean;
AuthType?: MyHubType;
IsPrivate: boolean;
Token: string;
UserName: string;
Password: string;
SSHPrivateKey: string;
SSHPublicKey: string;
LastSyncedAt: string;
hubName: string;
repoURL: string;
repoBranch: string;
totalExp: string;
isAvailable: boolean;
authType?: MyHubType;
isPrivate: boolean;
token: string;
userName: string;
password: string;
sshPrivateKey: string;
sshPublicKey: string;
lastSyncedAt: string;
}
export interface HubStatus {
getHubStatus: HubDetails[];
listHubStatus: HubDetails[];
}
export enum MyHubActions {


@ -1,7 +1,7 @@
import { Node } from '../graphql/workflowData';
export interface SelectedNode extends Node {
pod_name: string;
podName: string;
}
export enum NodeSelectionActions {


@ -30,9 +30,9 @@ export interface customWorkflow {
export interface WorkflowData {
chaosEngineChanged: boolean;
namespace: string;
workflow_id?: string;
clusterid: string;
clustername: string;
workflowID?: string;
clusterID: string;
clusterName: string;
cronSyntax: string;
scheduleType: scheduleType;
scheduleInput: scheduleInput;


@ -23,12 +23,12 @@ import {
} from '../../../graphql/mutations';
import { GET_HUB_STATUS } from '../../../graphql/queries';
import {
CreateMyHub,
MyHubData,
MyHubType,
SSHKey,
MyHubData,
CreateMyHub,
SSHKeys,
} from '../../../models/graphql/user';
MyHubType,
} from '../../../models/graphql/chaoshub';
import { HubStatus } from '../../../models/redux/myhub';
import { getProjectID } from '../../../utils/getSearchParams';
import {
@ -90,11 +90,11 @@ const MyHubConnectDrawer: React.FC<MyHubConnectDrawerProps> = ({
});
const { data } = useQuery<HubStatus>(GET_HUB_STATUS, {
variables: { data: projectID },
variables: { projectID },
fetchPolicy: 'network-only',
});
const hubData = data?.getHubStatus.filter(
(hubs) => hubs.HubName === hubName
const hubData = data?.listHubStatus.filter(
(hubs) => hubs.hubName === hubName
)[0];
/**
@ -169,28 +169,28 @@ const MyHubConnectDrawer: React.FC<MyHubConnectDrawerProps> = ({
if (hubName?.length) {
updateMyHub({
variables: {
MyHubDetails: {
request: {
id: hubData?.id,
HubName: gitHub.HubName.trim(),
RepoURL: gitHub.GitURL,
RepoBranch: gitHub.GitBranch,
IsPrivate: isToggled.isPublicToggled
hubName: gitHub.HubName.trim(),
repoURL: gitHub.GitURL,
repoBranch: gitHub.GitBranch,
isPrivate: isToggled.isPublicToggled
? false
: !!isToggled.isPrivateToggled,
AuthType: isToggled.isPublicToggled
? MyHubType.basic
authType: isToggled.isPublicToggled
? MyHubType.BASIC
: privateHub === 'token'
? MyHubType.token
? MyHubType.TOKEN
: privateHub === 'ssh'
? MyHubType.ssh
: MyHubType.basic,
Token: accessToken,
UserName: 'user',
Password: 'user',
SSHPrivateKey: sshKey.privateKey,
SSHPublicKey: sshKey.publicKey,
? MyHubType.SSH
: MyHubType.BASIC,
token: accessToken,
userName: 'user',
password: 'user',
sshPrivateKey: sshKey.privateKey,
sshPublicKey: sshKey.publicKey,
projectID,
},
projectID,
},
});
} else
@ -199,27 +199,27 @@ const MyHubConnectDrawer: React.FC<MyHubConnectDrawerProps> = ({
*/
addMyHub({
variables: {
MyHubDetails: {
HubName: gitHub.HubName.trim(),
RepoURL: gitHub.GitURL,
RepoBranch: gitHub.GitBranch,
IsPrivate: isToggled.isPublicToggled
request: {
hubName: gitHub.HubName.trim(),
repoURL: gitHub.GitURL,
repoBranch: gitHub.GitBranch,
isPrivate: isToggled.isPublicToggled
? false
: !!isToggled.isPrivateToggled,
AuthType: isToggled.isPublicToggled
? MyHubType.basic
authType: isToggled.isPublicToggled
? MyHubType.BASIC
: privateHub === 'token'
? MyHubType.token
? MyHubType.TOKEN
: privateHub === 'ssh'
? MyHubType.ssh
: MyHubType.basic,
Token: accessToken,
UserName: 'user',
Password: 'user',
SSHPrivateKey: sshKey.privateKey,
SSHPublicKey: sshKey.publicKey,
? MyHubType.SSH
: MyHubType.BASIC,
token: accessToken,
userName: 'user',
password: 'user',
sshPrivateKey: sshKey.privateKey,
sshPublicKey: sshKey.publicKey,
projectID,
},
projectID,
},
});
};
@ -267,11 +267,11 @@ const MyHubConnectDrawer: React.FC<MyHubConnectDrawerProps> = ({
if (hubName?.length) {
if (hubData !== undefined) {
setGitHub({
HubName: hubData.HubName,
GitURL: hubData.RepoURL,
GitBranch: hubData.RepoBranch,
HubName: hubData.hubName,
GitURL: hubData.repoURL,
GitBranch: hubData.repoBranch,
});
if (hubData.IsPrivate) {
if (hubData.isPrivate) {
setIsToggled({
isPublicToggled: false,
isPrivateToggled: true,
@ -282,14 +282,14 @@ const MyHubConnectDrawer: React.FC<MyHubConnectDrawerProps> = ({
isPrivateToggled: false,
});
}
if (hubData.AuthType === MyHubType.token) {
if (hubData.authType === MyHubType.TOKEN) {
setPrivateHub('token');
setAccessToken(hubData.Token);
} else if (hubData.AuthType === MyHubType.ssh) {
setAccessToken(hubData.token);
} else if (hubData.authType === MyHubType.SSH) {
setPrivateHub('ssh');
setSshKey({
privateKey: hubData.SSHPrivateKey,
publicKey: hubData.SSHPublicKey,
privateKey: hubData.sshPrivateKey,
publicKey: hubData.sshPublicKey,
});
} else {
setPrivateHub('token');


@ -77,11 +77,11 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
action={
<div className={classes.mainCardDiv}>
<div
className={hub.IsAvailable ? classes.connected : classes.error}
className={hub.isAvailable ? classes.connected : classes.error}
>
<Center>
<Typography className={classes.statusText}>
{hub.IsAvailable ? 'Connected' : 'Error'}
{hub.isAvailable ? 'Connected' : 'Error'}
</Typography>
</Center>
</div>
@ -128,7 +128,7 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
data-cy="myHubEdit"
value="View"
onClick={() => {
handleEditHub(hub.HubName);
handleEditHub(hub.hubName);
handleClose();
}}
>
@ -175,7 +175,7 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
<CardContent
onClick={() => {
history.push({
pathname: `/myhub/${hub.HubName}`,
pathname: `/myhub/${hub.hubName}`,
search: `?projectID=${projectID}&projectRole=${userRole}`,
});
}}
@ -183,7 +183,7 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
<div className={classes.cardContent}>
<img
src={`./icons/${
hub.HubName === 'Litmus ChaosHub'
hub.hubName === 'Litmus ChaosHub'
? 'myhub-litmus.svg'
: 'my-hub-charts.svg'
}`}
@ -195,11 +195,11 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
align="center"
className={classes.hubName}
>
<strong>{hub.HubName}</strong>/{hub.RepoBranch}
<strong>{hub.hubName}</strong>/{hub.repoBranch}
</Typography>
<Typography className={classes.totalExp} gutterBottom>
{parseInt(hub.TotalExp, 10) > 0
? `${hub.TotalExp} experiments`
{parseInt(hub.totalExp, 10) > 0
? `${hub.totalExp} experiments`
: t('myhub.error')}
</Typography>
</div>
@ -216,7 +216,7 @@ const CustomMyHubCard: React.FC<customMyHubCardProp> = ({
{t('myhub.lastSync')}
</Typography>
<Typography className={classes.lastSyncText}>
{formatDate(hub.LastSyncedAt)}
{formatDate(hub.lastSyncedAt)}
</Typography>
</div>
)}


@ -41,7 +41,7 @@ const MyHub: React.FC = () => {
// Get MyHubs with Status
const { data, loading, refetch } = useQuery<HubStatus>(GET_HUB_STATUS, {
variables: { data: projectID },
variables: { projectID },
fetchPolicy: 'cache-and-network',
});
@ -83,7 +83,7 @@ const MyHub: React.FC = () => {
refetchQueries: [
{
query: GET_HUB_STATUS,
variables: { data: projectID },
variables: { projectID },
},
],
onError: () => {
@ -98,7 +98,7 @@ const MyHub: React.FC = () => {
},
});
const totalHubs = data && data.getHubStatus;
const totalHubs = data && data.listHubStatus;
const [deleteHub, setDeleteHub] = useState<DeleteHub>({
deleteHubModal: false,


@ -6,7 +6,7 @@ import { useSelector } from 'react-redux';
import BackButton from '../../components/Button/BackButton';
import Loader from '../../components/Loader';
import Wrapper from '../../containers/layouts/Wrapper';
import { LIST_DASHBOARD, LIST_DATASOURCE } from '../../graphql';
import { GET_DASHBOARD, GET_DATASOURCE } from '../../graphql';
import {
DashboardDetails,
PanelDetails,
@ -16,13 +16,13 @@ import {
import {
ApplicationMetadata,
ApplicationMetadataResponse,
DashboardList,
ListDashboardVars,
GetDashboard,
GetDashboardRequest,
PanelGroupResponse,
PanelOption,
PanelResponse,
Resource,
updatePanelGroupInput,
UpdatePanelGroupRequest,
} from '../../models/graphql/dashboardsDetails';
import {
DataSourceList,
@ -55,7 +55,7 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
data: dataSourceList,
loading: loadingDataSources,
error: errorFetchingDataSources,
} = useQuery<DataSourceList, ListDataSourceVars>(LIST_DATASOURCE, {
} = useQuery<DataSourceList, ListDataSourceVars>(GET_DATASOURCE, {
variables: { projectID },
fetchPolicy: 'cache-and-network',
});
@ -65,7 +65,7 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
data: dashboardList,
loading: loadingDashboard,
error: errorFetchingDashboard,
} = useQuery<DashboardList, ListDashboardVars>(LIST_DASHBOARD, {
} = useQuery<GetDashboard, GetDashboardRequest>(GET_DASHBOARD, {
variables: { projectID, dbID: selectedDashboard.selectedDashboardID },
skip: !configure || selectedDashboard.selectedDashboardID === '',
fetchPolicy: 'network-only',
@ -96,39 +96,39 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
const panels: PanelDetails[] = [];
panelGroup.panels.forEach((panel: PanelResponse) => {
const promQueries: PromQueryDetails[] = [];
panel.prom_queries.forEach((promQuery) => {
panel.promQueries.forEach((promQuery) => {
promQueries.push({
queryid: promQuery.queryid,
prom_query_name: promQuery.prom_query_name,
queryID: promQuery.queryID,
promQueryName: promQuery.promQueryName,
legend: promQuery.legend,
resolution: promQuery.resolution,
minstep: promQuery.minstep,
line: promQuery.line,
close_area: promQuery.close_area,
closeArea: promQuery.closeArea,
});
});
const panelOption: PanelOption = {
points: panel.panel_options.points,
grids: panel.panel_options.grids,
left_axis: panel.panel_options.left_axis,
points: panel.panelOptions.points,
grIDs: panel.panelOptions.grIDs,
leftAxis: panel.panelOptions.leftAxis,
};
panels.push({
panel_name: panel.panel_name,
y_axis_left: panel.y_axis_left,
y_axis_right: panel.y_axis_right,
x_axis_down: panel.x_axis_down,
panelName: panel.panelName,
yAxisLeft: panel.yAxisLeft,
yAxisRight: panel.yAxisRight,
xAxisDown: panel.xAxisDown,
unit: panel.unit,
panel_options: panelOption,
prom_queries: promQueries,
panel_id: panel.panel_id,
created_at: panel.created_at,
panel_group_id: panelGroup.panel_group_id,
panel_group_name: panelGroup.panel_group_name,
panelOptions: panelOption,
promQueries,
panelID: panel.panelID,
createdAt: panel.createdAt,
panelGroupID: panelGroup.panelGroupID,
panelGroupName: panelGroup.panelGroupName,
});
});
panelGroups.push({
panel_group_id: panelGroup.panel_group_id,
panel_group_name: panelGroup.panel_group_name,
panelGroupID: panelGroup.panelGroupID,
panelGroupName: panelGroup.panelGroupName,
panels,
});
});
@ -139,12 +139,12 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
const getExistingPanelGroupMap = (
panelGroupsInput: PanelGroupResponse[]
) => {
const panelGroupMap: updatePanelGroupInput[] = [];
const panelGroupMap: UpdatePanelGroupRequest[] = [];
if (panelGroupsInput?.length) {
panelGroupsInput.forEach((panelGroup: PanelGroupResponse) => {
panelGroupMap.push({
panel_group_id: panelGroup.panel_group_id,
panel_group_name: panelGroup.panel_group_name,
panelGroupID: panelGroup.panelGroupID,
panelGroupName: panelGroup.panelGroupName,
panels: panelGroup.panels,
});
});
@ -178,28 +178,27 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
if (
configure === true &&
dashboardList &&
dashboardList.ListDashboard &&
dashboardList.ListDashboard.length > 0
dashboardList.listDashboard &&
dashboardList.listDashboard.length > 0
) {
const dashboardDetail = dashboardList.ListDashboard[0];
const dashboardDetail = dashboardList.listDashboard[0];
setDashboardVars({
...dashboardVars,
id: selectedDashboard.selectedDashboardID,
name: dashboardDetail.db_name,
dataSourceType: dashboardDetail.ds_type,
dashboardTypeID: dashboardDetail.db_type_id,
dashboardTypeName: dashboardDetail.db_type_name,
dataSourceID: dashboardDetail.ds_id,
dataSourceURL: dashboardDetail.ds_url,
agentID: dashboardDetail.cluster_id,
information: dashboardDetail.db_information,
panelGroupMap: getExistingPanelGroupMap(dashboardDetail.panel_groups),
panelGroups: getExistingPanelGroups(dashboardDetail.panel_groups),
chaosEventQueryTemplate: dashboardDetail.chaos_event_query_template,
chaosVerdictQueryTemplate:
dashboardDetail.chaos_verdict_query_template,
name: dashboardDetail.dbName,
dataSourceType: dashboardDetail.dsType,
dashboardTypeID: dashboardDetail.dbTypeID,
dashboardTypeName: dashboardDetail.dbTypeName,
dataSourceID: dashboardDetail.dsID,
dataSourceURL: dashboardDetail.dsURL,
agentID: dashboardDetail.clusterID,
information: dashboardDetail.dbInformation,
panelGroupMap: getExistingPanelGroupMap(dashboardDetail.panelGroups),
panelGroups: getExistingPanelGroups(dashboardDetail.panelGroups),
chaosEventQueryTemplate: dashboardDetail.chaosEventQueryTemplate,
chaosVerdictQueryTemplate: dashboardDetail.chaosVerdictQueryTemplate,
applicationMetadataMap: getApplicationMetadataMap(
dashboardDetail.application_metadata_map
dashboardDetail.applicationMetadataMap
),
});
}
@ -244,7 +243,7 @@ const ChooseAndConfigureDashboards: React.FC<ChooseAndConfigureDashboardsProps>
configure={configure}
activePanelID={selectedDashboard.activePanelID}
existingDashboardVars={dashboardVars}
dataSourceList={dataSourceList?.ListDataSource ?? []}
dataSourceList={dataSourceList?.listDataSource ?? []}
/>
</>
)}


@ -104,23 +104,20 @@ const DataSourceConfigurePage: React.FC<DataSourceConfigurePageProps> = ({
authType = 'basic auth';
}
const dataSourceInput = {
ds_name: dataSourceVars.name,
ds_type: dataSourceVars.dataSourceType,
ds_url:
dsName: dataSourceVars.name,
dsType: dataSourceVars.dataSourceType,
dsURL:
dataSourceVars.url[dataSourceVars.url.length - 1] !== '/'
? dataSourceVars.url
: dataSourceVars.url.slice(0, -1),
access_type: dataSourceVars.access,
auth_type: authType,
basic_auth_username: dataSourceVars.username,
basic_auth_password: dataSourceVars.password,
scrape_interval: parseInt(
dataSourceVars.scrapeInterval.split('s')[0],
10
),
query_timeout: parseInt(dataSourceVars.queryTimeout.split('s')[0], 10),
http_method: dataSourceVars.httpMethod,
project_id: projectID,
accessType: dataSourceVars.access,
authType,
basicAuthUsername: dataSourceVars.username,
basicAuthPassword: dataSourceVars.password,
scrapeInterval: parseInt(dataSourceVars.scrapeInterval.split('s')[0], 10),
queryTimeout: parseInt(dataSourceVars.queryTimeout.split('s')[0], 10),
httpMethod: dataSourceVars.httpMethod,
projectID,
};
createDataSource({
variables: { DSInput: dataSourceInput },
@ -133,24 +130,21 @@ const DataSourceConfigurePage: React.FC<DataSourceConfigurePageProps> = ({
authType = 'basic auth';
}
const dataSourceInput = {
ds_id: dataSourceVars.id ?? '',
ds_name: dataSourceVars.name,
ds_type: dataSourceVars.dataSourceType,
ds_url:
dsID: dataSourceVars.id ?? '',
dsName: dataSourceVars.name,
dsType: dataSourceVars.dataSourceType,
dsURL:
dataSourceVars.url[dataSourceVars.url.length - 1] !== '/'
? dataSourceVars.url
: dataSourceVars.url.slice(0, -1),
access_type: dataSourceVars.access,
auth_type: authType,
basic_auth_username: dataSourceVars.username,
basic_auth_password: dataSourceVars.password,
scrape_interval: parseInt(
dataSourceVars.scrapeInterval.split('s')[0],
10
),
query_timeout: parseInt(dataSourceVars.queryTimeout.split('s')[0], 10),
http_method: dataSourceVars.httpMethod,
project_id: projectID,
accessType: dataSourceVars.access,
authType,
basicAuthUsername: dataSourceVars.username,
basicAuthPassword: dataSourceVars.password,
scrapeInterval: parseInt(dataSourceVars.scrapeInterval.split('s')[0], 10),
queryTimeout: parseInt(dataSourceVars.queryTimeout.split('s')[0], 10),
httpMethod: dataSourceVars.httpMethod,
projectID,
};
updateDataSource({
variables: { DSInput: dataSourceInput },
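Not part of the diff: a minimal standalone sketch of the camelCase payload that both createDataSource and updateDataSource now receive. The helper and the shape of its `vars` parameter are illustrative assumptions; the field names themselves come from the hunks above.

// Assumed input type mirroring the camelCase keys shown above.
interface DataSourceInput {
  dsID?: string;
  dsName: string;
  dsType: string;
  dsURL: string;
  accessType: string;
  authType: string;
  basicAuthUsername: string;
  basicAuthPassword: string;
  scrapeInterval: number;
  queryTimeout: number;
  httpMethod: string;
  projectID: string;
}

// Hypothetical helper collecting the inline conversions done in the component.
const toDataSourceInput = (
  vars: {
    id?: string;
    name: string;
    dataSourceType: string;
    url: string;
    access: string;
    username: string;
    password: string;
    scrapeInterval: string; // e.g. '15s'
    queryTimeout: string; // e.g. '30s'
    httpMethod: string;
  },
  authType: string,
  projectID: string
): DataSourceInput => ({
  dsID: vars.id,
  dsName: vars.name,
  dsType: vars.dataSourceType,
  // drop a single trailing slash, mirroring the inline check above
  dsURL: vars.url.endsWith('/') ? vars.url.slice(0, -1) : vars.url,
  accessType: vars.access,
  authType,
  basicAuthUsername: vars.username,
  basicAuthPassword: vars.password,
  // '15s' -> 15, matching the parseInt(...split('s')[0], 10) calls above
  scrapeInterval: parseInt(vars.scrapeInterval.split('s')[0], 10),
  queryTimeout: parseInt(vars.queryTimeout.split('s')[0], 10),
  httpMethod: vars.httpMethod,
  projectID,
});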


@ -94,7 +94,7 @@ const ScheduleWorkflow = () => {
delete newParsedYaml.metadata.generateName;
newParsedYaml.metadata.name = fetchWorkflowNameFromManifest(manifest);
newParsedYaml.metadata.labels = {
workflow_id: workflowData.workflow_id,
workflow_id: workflowData.workflowID,
};
newParsedYaml.spec.workflowSpec = oldParsedYaml.spec;
const tz = {
@ -119,7 +119,7 @@ const ScheduleWorkflow = () => {
newParsedYaml.metadata.name = fetchWorkflowNameFromManifest(manifest);
newParsedYaml.spec = oldParsedYaml.spec.workflowSpec;
newParsedYaml.metadata.labels = {
workflow_id: workflowData.workflow_id,
workflow_id: workflowData.workflowID,
};
NewYaml = YAML.stringify(newParsedYaml);
workflowAction.setWorkflowManifest({
@ -136,7 +136,7 @@ const ScheduleWorkflow = () => {
// newParsedYaml.spec.suspend = false;
delete newParsedYaml.metadata.generateName;
newParsedYaml.metadata.name = fetchWorkflowNameFromManifest(manifest);
newParsedYaml.metadata.labels = { workflow_id: workflowData.workflow_id };
newParsedYaml.metadata.labels = { workflow_id: workflowData.workflowID };
const tz = {
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC',
};


@ -15,14 +15,14 @@ import Loader from '../../components/Loader';
import { parseYamlValidations } from '../../components/YamlEditor/Validations';
import Wrapper from '../../containers/layouts/Wrapper';
import { UPDATE_SCHEDULE } from '../../graphql/mutations';
import { WORKFLOW_LIST_DETAILS } from '../../graphql/queries';
import { GET_WORKFLOW_DETAILS } from '../../graphql/queries';
import {
CreateWorkFlowInput,
CreateWorkFlowRequest,
UpdateWorkflowResponse,
WeightMap,
} from '../../models/graphql/createWorkflowData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import { experimentMap, WorkflowData } from '../../models/redux/workflow';
@ -83,14 +83,14 @@ const EditSchedule: React.FC = () => {
const projectID = getProjectID();
const userRole = getProjectRole();
const { data, loading } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS,
const { data, loading } = useQuery<ScheduledWorkflows, GetWorkflowsRequest>(
GET_WORKFLOW_DETAILS,
{
variables: {
workflowInput: {
project_id: projectID,
request: {
projectID,
filter: {
workflow_name: paramData.workflowName,
workflowName: paramData.workflowName,
},
},
},
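A self-contained sketch of the new query convention used here: the variables wrapper is `request` instead of `workflowInput`, and every field is camelCase. The hook name and the `@apollo/client` import path are assumptions; the query, types, and module paths appear in the hunk above.

import { useQuery } from '@apollo/client';
import { GET_WORKFLOW_DETAILS } from '../../graphql/queries';
import {
  GetWorkflowsRequest,
  ScheduledWorkflows,
} from '../../models/graphql/workflowListData';

// Hypothetical helper hook showing the renamed request shape in one place.
export const useWorkflowByName = (projectID: string, workflowName: string) =>
  useQuery<ScheduledWorkflows, GetWorkflowsRequest>(GET_WORKFLOW_DETAILS, {
    variables: {
      request: {
        projectID,
        filter: { workflowName },
      },
    },
  });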
@ -102,14 +102,14 @@ const EditSchedule: React.FC = () => {
(state: RootState) => state.workflowManifest.manifest
);
const wfDetails = data && data.ListWorkflow.workflows[0];
const wfDetails = data && data.listWorkflows.workflows[0];
const doc = new YAML.Document();
const w: Weights[] = [];
const { cronSyntax, clusterid, clustername } = workflowData;
const { cronSyntax, clusterID, clusterName } = workflowData;
const [createChaosWorkFlow, { error: workflowError }] = useMutation<
UpdateWorkflowResponse,
CreateWorkFlowInput
CreateWorkFlowRequest
>(UPDATE_SCHEDULE, {
onCompleted: () => {
setFinishModalOpen(true);
@ -126,7 +126,7 @@ const EditSchedule: React.FC = () => {
weights.forEach((data) => {
weightData.push({
experiment_name: data.experimentName,
experimentName: data.experimentName,
weightage: data.weight,
});
});
@ -137,19 +137,19 @@ const EditSchedule: React.FC = () => {
const yamlJson = JSON.stringify(yml, null, 2); // Converted to Stringified JSON
const chaosWorkFlowInputs = {
workflow_id: wfDetails?.workflow_id,
workflow_manifest: yamlJson,
workflow_id: wfDetails?.workflowID,
workflowManifest: yamlJson,
cronSyntax,
workflow_name: fetchWorkflowNameFromManifest(manifest),
workflow_description: workflow.description,
workflowName: fetchWorkflowNameFromManifest(manifest),
workflowDescription: workflow.description,
isCustomWorkflow: false,
weightages: weightData,
project_id: projectID,
cluster_id: clusterid,
projectID,
clusterID,
};
createChaosWorkFlow({
variables: { ChaosWorkFlowInput: chaosWorkFlowInputs },
variables: { request: chaosWorkFlowInputs },
});
}
};
@ -159,22 +159,22 @@ const EditSchedule: React.FC = () => {
if (wfDetails !== undefined) {
for (let i = 0; i < wfDetails?.weightages.length; i++) {
w.push({
experimentName: wfDetails?.weightages[i].experiment_name,
experimentName: wfDetails?.weightages[i].experimentName,
weight: wfDetails?.weightages[i].weightage,
});
}
doc.contents = JSON.parse(wfDetails?.workflow_manifest);
doc.contents = JSON.parse(wfDetails?.workflowManifest);
workflowAction.setWorkflowManifest({
manifest: isCronEdited === null ? doc.toString() : manifest,
});
setWorkflow({
name: wfDetails?.workflow_name,
description: wfDetails?.workflow_description,
name: wfDetails?.workflowName,
description: wfDetails?.workflowDescription,
});
localforage.setItem('weights', w);
workflowAction.setWorkflowDetails({
workflow_id: wfDetails?.workflow_id,
clusterid: wfDetails?.cluster_id,
workflow_id: wfDetails?.workflowID,
clusterID: wfDetails?.clusterID,
cronSyntax:
isCronEdited === null ? wfDetails?.cronSyntax : cronSyntax,
scheduleType: {
@ -313,7 +313,7 @@ const EditSchedule: React.FC = () => {
</Typography>
</div>
<Typography className={classes.schCol2}>
{clustername}
{clusterName}
</Typography>
</div>


@ -58,6 +58,7 @@ const GetStarted: React.FC = () => {
.then((data) => {
if ('error' in data) {
console.error(data);
// eslint-disable-next-line no-alert
window.alert('Token expired, please login again');
logout();
}


@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next';
import Loader from '../../components/Loader';
import Wrapper from '../../containers/layouts/Wrapper';
import { GET_CLUSTER_LENGTH } from '../../graphql';
import { Clusters, ClusterVars } from '../../models/graphql/clusterData';
import { ClusterRequest, Clusters } from '../../models/graphql/clusterData';
import { getUsername } from '../../utils/auth';
import { getProjectID } from '../../utils/getSearchParams';
import useStyles from './styles';
@ -19,10 +19,10 @@ const HomePage: React.FC = () => {
const classes = useStyles();
const { t } = useTranslation();
const { data: agentList, loading } = useQuery<Clusters, ClusterVars>(
const { data: agentList, loading } = useQuery<Clusters, ClusterRequest>(
GET_CLUSTER_LENGTH,
{
variables: { project_id: getProjectID() },
variables: { projectID: getProjectID() },
fetchPolicy: 'network-only',
}
);
@ -30,7 +30,7 @@ const HomePage: React.FC = () => {
let agentCount = 0;
if (agentList !== undefined) {
agentCount = agentList.getCluster.length;
agentCount = agentList?.listClusters.length;
}
return (


@ -14,7 +14,7 @@ import { useSelector } from 'react-redux';
import BackButton from '../../components/Button/BackButton';
import Loader from '../../components/Loader';
import Wrapper from '../../containers/layouts/Wrapper';
import { LIST_DASHBOARD, VIEW_DASHBOARD } from '../../graphql';
import { GET_DASHBOARD, VIEW_DASHBOARD } from '../../graphql';
import {
PanelNameAndID,
ParsedChaosEventPrometheusData,
@ -23,9 +23,9 @@ import {
SelectedDashboardInformation,
} from '../../models/dashboardsData';
import {
DashboardList,
ListDashboardResponse,
ListDashboardVars,
GetDashboard,
GetDashboardRequest,
GetDashboardResponse,
PanelGroupResponse,
PanelResponse,
} from '../../models/graphql/dashboardsDetails';
@ -144,7 +144,7 @@ const DashboardPage: React.FC = () => {
loading: loadingDashboards,
error: errorFetchingDashboards,
refetch: refetchDashboards,
} = useQuery<DashboardList, ListDashboardVars>(LIST_DASHBOARD, {
} = useQuery<GetDashboard, GetDashboardRequest>(GET_DASHBOARD, {
variables: {
projectID,
clusterID: selectedDashboard.selectedAgentID,
@ -168,24 +168,24 @@ const DashboardPage: React.FC = () => {
error: errorFetchingDashboardQueries,
} = useSubscription<ViewDashboard, ViewDashboardInput>(VIEW_DASHBOARD, {
variables: {
dbID: selectedDashboardInformation.id,
prometheusQueries: selectedDashboardInformation.promQueries,
queryMap: getDashboardQueryMap(
selectedDashboardInformation.metaData?.panel_groups ?? []
dashboardID: selectedDashboardInformation.id,
promQueries: selectedDashboardInformation.promQueries,
dashboardQueryMap: getDashboardQueryMap(
selectedDashboardInformation.metaData?.panelGroups ?? []
),
dataVarMap: {
dataVariables: {
url: selectedDashboardInformation.dataSourceURL,
start: selectedDashboardInformation.range.startDate,
end: selectedDashboardInformation.range.endDate,
relative_time: selectedDashboardInformation.relativeTime,
refresh_interval: selectedDashboardInformation.refreshInterval,
relativeTime: selectedDashboardInformation.relativeTime,
refreshInterval: selectedDashboardInformation.refreshInterval,
},
},
skip:
loadingDashboards ||
errorFetchingDashboards !== undefined ||
selectedDashboardInformation.promQueries.length === 0 ||
selectedDashboardInformation.metaData?.panel_groups.length === 0 ||
selectedDashboardInformation.metaData?.panelGroups.length === 0 ||
selectedDashboardInformation.dataSourceURL === '' ||
(selectedDashboardInformation.range.startDate === INVALID_DATE &&
selectedDashboardInformation.range.endDate === INVALID_DATE &&
@ -205,13 +205,13 @@ const DashboardPage: React.FC = () => {
onSubscriptionData: (subscriptionUpdate) => {
setPromData({
chaosEventData: ChaosEventDataParserForPrometheus(
subscriptionUpdate.subscriptionData.data?.viewDashboard
subscriptionUpdate.subscriptionData?.data?.viewDashboard
?.annotationsResponse ?? [],
areaGraph,
selectedEvents
),
panelGroupQueryMap: DashboardMetricDataParserForPrometheus(
subscriptionUpdate.subscriptionData.data?.viewDashboard
subscriptionUpdate.subscriptionData?.data?.viewDashboard
?.dashboardMetricsResponse ?? [],
lineGraph,
areaGraph,
@ -236,65 +236,64 @@ const DashboardPage: React.FC = () => {
useEffect(() => {
if (
dashboards &&
dashboards.ListDashboard &&
dashboards.ListDashboard.length
dashboards.listDashboard &&
dashboards.listDashboard.length
) {
if (
selectedDashboardInformation.id !==
selectedDashboardInformation.dashboardKey
) {
const selectedDashboard: ListDashboardResponse =
dashboards.ListDashboard.filter((data) => {
return data.db_id === selectedDashboardInformation.id;
const selectedDashboard: GetDashboardResponse =
dashboards.listDashboard.filter((data) => {
return data.dbID === selectedDashboardInformation.id;
})[0];
const selectedPanelNameAndIDList: PanelNameAndID[] = [];
if (selectedDashboard) {
(selectedDashboard.panel_groups ?? []).forEach(
(selectedDashboard.panelGroups ?? []).forEach(
(panelGroup: PanelGroupResponse) => {
(panelGroup.panels ?? []).forEach((panel: PanelResponse) => {
selectedPanelNameAndIDList.push({
name: panel.panel_name,
id: panel.panel_id,
name: panel.panelName,
id: panel.panelID,
});
});
}
);
setSelectedDashboardInformation({
...selectedDashboardInformation,
dashboardListForAgent: dashboards.ListDashboard,
dashboardListForAgent: dashboards.listDashboard,
metaData: selectedDashboard,
closedAreaQueryIDs: (selectedDashboard.panel_groups ?? [])
closedAreaQueryIDs: (selectedDashboard.panelGroups ?? [])
.flatMap((panelGroup) =>
panelGroup ? panelGroup.panels ?? [] : []
)
.flatMap((panel) => (panel ? panel.prom_queries ?? [] : []))
.filter((query) => query.close_area)
.map((query) => query.queryid),
.flatMap((panel) => (panel ? panel.promQueries ?? [] : []))
.filter((query) => query.closeArea)
.map((query) => query.queryID),
dashboardKey: selectedDashboardInformation.id,
panelNameAndIDList: selectedPanelNameAndIDList,
name: selectedDashboard.db_name,
typeName: selectedDashboard.db_type_name,
typeID: selectedDashboard.db_type_id,
agentName: selectedDashboard.cluster_name,
name: selectedDashboard.dbName,
typeName: selectedDashboard.dbTypeName,
typeID: selectedDashboard.dbTypeID,
agentName: selectedDashboard.clusterName,
urlToIcon: `./icons/${
selectedDashboard.db_type_id.includes('custom')
selectedDashboard.dbTypeID.includes('custom')
? 'custom'
: selectedDashboard.db_type_id
: selectedDashboard.dbTypeID
}_dashboard.svg`,
information: selectedDashboard.db_information,
chaosEventQueryTemplate:
selectedDashboard.chaos_event_query_template,
information: selectedDashboard.dbInformation,
chaosEventQueryTemplate: selectedDashboard.chaosEventQueryTemplate,
chaosVerdictQueryTemplate:
selectedDashboard.chaos_verdict_query_template,
applicationMetadataMap: selectedDashboard.application_metadata_map,
dataSourceURL: selectedDashboard.ds_url,
dataSourceID: selectedDashboard.ds_id,
dataSourceName: selectedDashboard.ds_name,
selectedDashboard.chaosVerdictQueryTemplate,
applicationMetadataMap: selectedDashboard.applicationMetadataMap,
dataSourceURL: selectedDashboard.dsURL,
dataSourceID: selectedDashboard.dsID,
dataSourceName: selectedDashboard.dsName,
promQueries: generatePromQueries(
selectedDashboardInformation.range,
selectedDashboard.panel_groups ?? [],
selectedDashboard.chaos_event_query_template,
selectedDashboard.chaos_verdict_query_template
selectedDashboard.panelGroups ?? [],
selectedDashboard.chaosEventQueryTemplate,
selectedDashboard.chaosVerdictQueryTemplate
),
});
setSelectedPanels(
@ -305,8 +304,8 @@ const DashboardPage: React.FC = () => {
...promData,
panelGroupQueryMap: [],
});
if (selectedDashboard.ds_health_status !== ACTIVE) {
setDataSourceStatus(selectedDashboard.ds_health_status);
if (selectedDashboard.dsHealthStatus !== ACTIVE) {
setDataSourceStatus(selectedDashboard.dsHealthStatus);
}
}
setReFetch(true);
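A minimal sketch of the camelCase panel bookkeeping the effect above now performs. The local interfaces are assumptions declared only to keep the example self-contained; the field names are taken from the hunk.

// Assumed minimal shapes for illustration only.
interface PromQuery {
  queryID: string;
  closeArea?: boolean;
}
interface Panel {
  panelID: string;
  panelName: string;
  promQueries?: PromQuery[];
}
interface PanelGroup {
  panelGroupID: string;
  panelGroupName: string;
  panels?: Panel[];
}

// { name, id } pairs for every panel, as collected for panelNameAndIDList.
const collectPanelNamesAndIDs = (panelGroups: PanelGroup[]) =>
  panelGroups.flatMap((group) =>
    (group.panels ?? []).map((panel) => ({
      name: panel.panelName,
      id: panel.panelID,
    }))
  );

// IDs of queries rendered as closed-area graphs, as used for closedAreaQueryIDs.
const collectClosedAreaQueryIDs = (panelGroups: PanelGroup[]) =>
  panelGroups
    .flatMap((group) => group.panels ?? [])
    .flatMap((panel) => panel.promQueries ?? [])
    .filter((query) => query.closeArea)
    .map((query) => query.queryID);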
@ -454,21 +453,21 @@ const DashboardPage: React.FC = () => {
classes={{ paper: classes.menuList }}
>
{selectedDashboardInformation.dashboardListForAgent.map(
(data: ListDashboardResponse) => {
(data: GetDashboardResponse) => {
return (
<MenuItem
key={`${data.db_id}-monitoringDashboard`}
value={data.db_id}
key={`${data.dbID}-monitoringDashboard`}
value={data.dbID}
selected={
data.db_id === selectedDashboardInformation.id
data.dbID === selectedDashboardInformation.id
}
onClick={() => {
dashboard.selectDashboard({
selectedDashboardID: data.db_id,
selectedDashboardID: data.dbID,
});
setSelectedDashboardInformation({
...selectedDashboardInformation,
id: data.db_id,
id: data.dbID,
});
setAnchorEl(null);
}}
@ -480,7 +479,7 @@ const DashboardPage: React.FC = () => {
className={classes.btnText}
variant="h5"
>
{data.db_name}
{data.dbName}
</Typography>
</div>
</MenuItem>
@ -544,7 +543,7 @@ const DashboardPage: React.FC = () => {
refreshInterval,
promQueries: generatePromQueries(
range,
selectedDashboardInformation.metaData?.panel_groups ?? [],
selectedDashboardInformation.metaData?.panelGroups ?? [],
selectedDashboardInformation.chaosEventQueryTemplate,
selectedDashboardInformation.chaosVerdictQueryTemplate
),
@ -651,15 +650,15 @@ const DashboardPage: React.FC = () => {
promData.panelGroupQueryMap.length > 0 &&
!reFetching &&
selectedDashboardInformation.metaData &&
selectedDashboardInformation.metaData.panel_groups.length > 0 &&
selectedDashboardInformation.metaData.panel_groups.map(
selectedDashboardInformation.metaData.panelGroups.length > 0 &&
selectedDashboardInformation.metaData.panelGroups.map(
(panelGroup: PanelGroupResponse, index) => (
<div
key={`${panelGroup.panel_group_id}-dashboardPage-div`}
key={`${panelGroup.panelGroupID}-dashboardPage-div`}
data-cy="dashboardPanelGroup"
>
<DashboardPanelGroup
key={`${panelGroup.panel_group_id}-dashboardPage-component`}
key={`${panelGroup.panelGroupID}-dashboardPage-component`}
centralAllowGraphUpdate={centralAllowGraphUpdate}
centralBrushPosition={centralBrushPosition}
handleCentralBrushPosition={(
@ -766,7 +765,7 @@ const DashboardPage: React.FC = () => {
promQueries: generatePromQueries(
range,
selectedDashboardInformation.metaData
?.panel_groups ?? [],
?.panelGroups ?? [],
selectedDashboardInformation.chaosEventQueryTemplate,
selectedDashboardInformation.chaosVerdictQueryTemplate
),
@ -787,7 +786,7 @@ const DashboardPage: React.FC = () => {
promQueries: generatePromQueries(
timeControlObjectFromHistory.range,
selectedDashboardInformation.metaData
?.panel_groups ?? [],
?.panelGroups ?? [],
selectedDashboardInformation.chaosEventQueryTemplate,
selectedDashboardInformation.chaosVerdictQueryTemplate
),
@ -844,7 +843,7 @@ const DashboardPage: React.FC = () => {
endDate: INVALID_DATE,
},
selectedDashboardInformation.metaData
?.panel_groups ?? [],
?.panelGroups ?? [],
selectedDashboardInformation.chaosEventQueryTemplate,
selectedDashboardInformation.chaosVerdictQueryTemplate
),
@ -858,8 +857,8 @@ const DashboardPage: React.FC = () => {
setShowPromQueryResponseLoader(false);
}
}}
panel_group_id={panelGroup.panel_group_id}
panel_group_name={panelGroup.panel_group_name}
panelGroupID={panelGroup.panelGroupID}
panelGroupName={panelGroup.panelGroupName}
panels={panelGroup.panels ?? []}
selectedPanels={selectedPanels}
metricDataForGroup={


@ -63,7 +63,7 @@ const Settings: React.FC = () => {
>
<StyledTab data-cy="my-account" label="My Account" {...tabProps(0)} />
<StyledTab data-cy="teaming" label="Team" {...tabProps(1)} />
{role === UserRole.admin && (
{role === UserRole.ADMIN && (
<StyledTab
data-cy="user-management"
label="User Management"
@ -73,12 +73,12 @@ const Settings: React.FC = () => {
<StyledTab
data-cy="gitOps"
label="GitOps"
{...tabProps(role === UserRole.admin ? 3 : 2)}
{...tabProps(role === UserRole.ADMIN ? 3 : 2)}
/>
<StyledTab
data-cy="image-registry"
label="Image Registry"
{...tabProps(role === UserRole.admin ? 4 : 3)}
{...tabProps(role === UserRole.ADMIN ? 4 : 3)}
/>
</Tabs>
</Paper>
@ -94,7 +94,7 @@ const Settings: React.FC = () => {
</SuspenseLoader>
</TabPanel>
</div>
{role === UserRole.admin && (
{role === UserRole.ADMIN && (
<TabPanel value={settingsTabValue} index={2}>
<SuspenseLoader style={{ height: '50vh' }}>
<UserManagement />
@ -104,7 +104,7 @@ const Settings: React.FC = () => {
<div data-cy="GitOpsPanel">
<TabPanel
value={settingsTabValue}
index={role === UserRole.admin ? 3 : 2}
index={role === UserRole.ADMIN ? 3 : 2}
>
<SuspenseLoader style={{ height: '50vh' }}>
<GitOpsTab />
@ -114,7 +114,7 @@ const Settings: React.FC = () => {
<div data-cy="ImageRegistry">
<TabPanel
value={settingsTabValue}
index={role === UserRole.admin ? 4 : 3}
index={role === UserRole.ADMIN ? 4 : 3}
>
<SuspenseLoader style={{ height: '50vh' }}>
<ImageRegistry />


@ -11,20 +11,20 @@ import { SuspenseLoader } from '../../components/SuspenseLoader';
import { StyledTab, TabPanel } from '../../components/Tabs';
import Wrapper from '../../containers/layouts/Wrapper';
import {
GET_WORKFLOW_DETAILS,
WORKFLOW_DETAILS_WITH_EXEC_DATA,
WORKFLOW_EVENTS_WITH_EXEC_DATA,
WORKFLOW_LIST_DETAILS,
} from '../../graphql';
import { ScheduleWorkflow } from '../../models/graphql/scheduleData';
import {
ExecutionData,
Workflow,
WorkflowDataVars,
WorkflowDataRequest,
WorkflowSubscription,
WorkflowSubscriptionInput,
WorkflowSubscriptionRequest,
} from '../../models/graphql/workflowData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import useActions from '../../redux/actions';
@ -46,7 +46,7 @@ const NodeTable = lazy(
);
interface URLParams {
workflowRunId: string;
workflowRunID: string;
}
const WorkflowDetails: React.FC = () => {
@ -70,34 +70,33 @@ const WorkflowDetails: React.FC = () => {
(state: RootState) => state.tabNumber.node
);
const { pod_name } = useSelector((state: RootState) => state.selectedNode);
const { workflowRunId }: URLParams = useParams();
const { podName } = useSelector((state: RootState) => state.selectedNode);
const { workflowRunID }: URLParams = useParams();
// Query to get workflows
const { subscribeToMore, data, error } = useQuery<Workflow, WorkflowDataVars>(
WORKFLOW_DETAILS_WITH_EXEC_DATA,
{
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_run_ids: [workflowRunId],
},
const { subscribeToMore, data, error } = useQuery<
Workflow,
WorkflowDataRequest
>(WORKFLOW_DETAILS_WITH_EXEC_DATA, {
variables: {
request: {
projectID,
workflowRunIDs: [workflowRunID],
},
fetchPolicy: 'cache-and-network',
}
);
},
fetchPolicy: 'cache-and-network',
});
const workflowRun = data?.getWorkflowRuns.workflow_runs[0];
const workflowRun = data?.listWorkflowRuns.workflowRuns[0];
const { data: workflowData, loading } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
GetWorkflowsRequest
>(GET_WORKFLOW_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
workflow_ids: [workflowRun?.workflow_id ?? ' '],
request: {
projectID,
workflowIDs: [workflowRun?.workflowID ?? ' '],
},
},
fetchPolicy: 'cache-and-network',
@ -106,29 +105,27 @@ const WorkflowDetails: React.FC = () => {
// Using subscription to get realtime data
useEffect(() => {
if (workflowRun?.phase && workflowRun.phase === 'Running') {
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionRequest>({
document: WORKFLOW_EVENTS_WITH_EXEC_DATA,
variables: { projectID },
updateQuery: (prev, { subscriptionData }) => {
if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
if (!subscriptionData.data || !prev || !prev.listWorkflowRuns)
return prev;
const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
const newWorkflow = subscriptionData.data.workflowEventListener;
const modifiedWorkflows = prev.listWorkflowRuns.workflowRuns.slice();
const newWorkflow = subscriptionData.data.getWorkflowEvents;
// Update only the required workflowRun
if (
modifiedWorkflows[0].workflow_run_id === newWorkflow.workflow_run_id
)
if (modifiedWorkflows[0].workflowRunID === newWorkflow.workflowRunID)
modifiedWorkflows[0] = newWorkflow;
const totalNoOfWorkflows =
prev.getWorkflowRuns.total_no_of_workflow_runs;
prev.listWorkflowRuns.totalNoOfWorkflowRuns;
return {
getWorkflowRuns: {
total_no_of_workflow_runs: totalNoOfWorkflows,
workflow_runs: modifiedWorkflows,
listWorkflowRuns: {
totalNoOfWorkflowRuns: totalNoOfWorkflows,
workflowRuns: modifiedWorkflows,
},
};
},
@ -141,10 +138,12 @@ const WorkflowDetails: React.FC = () => {
};
useEffect(() => {
const scheduledWorkflow = workflowData?.ListWorkflow.workflows;
const scheduledWorkflow = workflowData?.listWorkflows.workflows;
if (scheduledWorkflow) {
setworkflowSchedulesDetails(
(scheduledWorkflow[0] ? scheduledWorkflow[0] : null) as ScheduleWorkflow
(scheduledWorkflow[0]
? scheduledWorkflow[0]
: null) as unknown as ScheduleWorkflow
);
}
}, [workflowData]);
@ -156,21 +155,19 @@ const WorkflowDetails: React.FC = () => {
// Setting NodeId of first Node in redux for selection of first node in Argo graph by default
useEffect(() => {
if (workflowRun !== undefined && pod_name === '') {
if (workflowRun !== undefined && podName === '') {
if (
JSON.parse(workflowRun.execution_data as string).nodes !== null &&
Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)
JSON.parse(workflowRun.executionData as string).nodes !== null &&
Object.keys(JSON.parse(workflowRun.executionData as string).nodes)
.length
) {
const firstNodeId = JSON.parse(workflowRun.execution_data as string)
const firstNodeId = JSON.parse(workflowRun.executionData as string)
.nodes[
Object.keys(JSON.parse(workflowRun.execution_data as string).nodes)[0]
Object.keys(JSON.parse(workflowRun.executionData as string).nodes)[0]
].name;
nodeSelection.selectNode({
...JSON.parse(workflowRun.execution_data as string).nodes[
firstNodeId
],
pod_name: firstNodeId,
...JSON.parse(workflowRun.executionData as string).nodes[firstNodeId],
podName: firstNodeId,
});
} else {
setWorkflowFailed(true);
@ -185,10 +182,10 @@ const WorkflowDetails: React.FC = () => {
<BackButton />
</div>
{/* If workflowRun data is present then display the workflowRun details */}
{workflowRun && pod_name !== '' && !loading ? (
{workflowRun && podName !== '' && !loading ? (
<div>
<Typography data-cy="wfName" className={classes.title}>
{t('workflowDetailsView.headerDesc')} {workflowRun.workflow_name}
{t('workflowDetailsView.headerDesc')} {workflowRun.workflowName}
</Typography>
{/* AppBar */}
@ -220,7 +217,7 @@ const WorkflowDetails: React.FC = () => {
{/* Argo Workflow DAG Graph */}
<ArgoWorkflow
nodes={
(JSON.parse(workflowRun.execution_data) as ExecutionData)
(JSON.parse(workflowRun.executionData) as ExecutionData)
.nodes
}
setIsInfoToggled={setIsInfoToggled}
@ -229,23 +226,23 @@ const WorkflowDetails: React.FC = () => {
{/* Workflow Details and Experiment Logs */}
{isInfoToggled ? (
<div>
{pod_name !==
JSON.parse(workflowRun.execution_data).nodes[
{podName !==
JSON.parse(workflowRun.executionData).nodes[
Object.keys(
JSON.parse(workflowRun.execution_data as string).nodes
JSON.parse(workflowRun.executionData as string).nodes
)[0]
].name ? (
/* Node details and Logs */
<WorkflowNodeInfo
manifest={
workflowSchedulesDetails?.workflow_manifest as string
workflowSchedulesDetails?.workflowManifest as string
}
setIsInfoToggled={setIsInfoToggled}
cluster_id={workflowRun.cluster_id}
workflow_run_id={workflowRun.workflow_run_id}
clusterID={workflowRun.clusterID}
workflowRunID={workflowRun.workflowRunID}
data={
JSON.parse(
workflowRun.execution_data
workflowRun.executionData
) as ExecutionData
}
/>
@ -254,14 +251,14 @@ const WorkflowDetails: React.FC = () => {
<WorkflowInfo
tab={1}
setIsInfoToggled={setIsInfoToggled}
workflow_phase={workflowRun.phase}
cluster_name={workflowRun.cluster_name}
workflowPhase={workflowRun.phase}
clusterName={workflowRun.clusterName}
data={
JSON.parse(
workflowRun.execution_data
workflowRun.executionData
) as ExecutionData
}
resiliency_score={workflowRun.resiliency_score}
resiliencyScore={workflowRun.resiliencyScore}
/>
)}
</div>
@ -274,27 +271,27 @@ const WorkflowDetails: React.FC = () => {
{/* Workflow Info */}
<WorkflowInfo
tab={2}
workflow_phase={workflowRun.phase}
cluster_name={workflowRun.cluster_name}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
resiliency_score={workflowRun.resiliency_score}
workflowPhase={workflowRun.phase}
clusterName={workflowRun.clusterName}
data={JSON.parse(workflowRun.executionData) as ExecutionData}
resiliencyScore={workflowRun.resiliencyScore}
/>
{/* Table for all Node details */}
<NodeTable
manifest={
workflowSchedulesDetails?.workflow_manifest as string
workflowSchedulesDetails?.workflowManifest as string
}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
data={JSON.parse(workflowRun.executionData) as ExecutionData}
handleClose={() => setLogsModalOpen(true)}
/>
{/* Modal for viewing logs of a node */}
<NodeLogsModal
logsOpen={logsModalOpen}
handleClose={() => setLogsModalOpen(false)}
cluster_id={workflowRun.cluster_id}
workflow_run_id={workflowRun.workflow_run_id}
data={JSON.parse(workflowRun.execution_data) as ExecutionData}
workflow_name={workflowRun.workflow_name}
clusterID={workflowRun.clusterID}
workflowRunID={workflowRun.workflowRunID}
data={JSON.parse(workflowRun.executionData) as ExecutionData}
workflowName={workflowRun.workflowName}
/>
</SuspenseLoader>
</TabPanel>

View File

@ -51,8 +51,8 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
>(GET_WORKFLOW_RUNS_STATS, {
variables: {
workflowRunStatsRequest: {
project_id: projectID,
workflow_ids: [workflowID],
projectID,
workflowIDs: [workflowID],
},
},
fetchPolicy: 'cache-and-network',
@ -68,22 +68,22 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
const graphData: RadialChartMetric[] = [
{
value: isSingleRun
? data?.getWorkflowRunStats.experiments_passed ?? 0
: data?.getWorkflowRunStats.succeeded_workflow_runs ?? 0,
? data?.getWorkflowRunStats.experimentsPassed ?? 0
: data?.getWorkflowRunStats.succeededWorkflowRuns ?? 0,
label: isSingleRun ? 'Passed' : 'Succeeded',
baseColor: theme.palette.status.experiment.completed,
},
{
value: isSingleRun
? data?.getWorkflowRunStats.experiments_failed ?? 0
: data?.getWorkflowRunStats.failed_workflow_runs ?? 0,
? data?.getWorkflowRunStats.experimentsFailed ?? 0
: data?.getWorkflowRunStats.failedWorkflowRuns ?? 0,
label: 'Failed',
baseColor: theme.palette.status.experiment.failed,
},
{
value: isSingleRun
? data?.getWorkflowRunStats.experiments_awaited ?? 0
: data?.getWorkflowRunStats.running_workflow_runs ?? 0,
? data?.getWorkflowRunStats.experimentsAwaited ?? 0
: data?.getWorkflowRunStats.runningWorkflowRuns ?? 0,
label: isSingleRun ? 'Awaited' : 'Running',
baseColor: theme.palette.status.workflow.running,
},
@ -92,12 +92,12 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
if (isSingleRun) {
graphData.push(
{
value: data?.getWorkflowRunStats.experiments_stopped ?? 0,
value: data?.getWorkflowRunStats.experimentsStopped ?? 0,
label: 'Stopped',
baseColor: theme.palette.status.experiment.error,
},
{
value: data?.getWorkflowRunStats.experiments_na ?? 0,
value: data?.getWorkflowRunStats.experimentsNa ?? 0,
label: 'NA',
baseColor: theme.palette.status.experiment.omitted,
}
@ -109,10 +109,10 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
};
const progressGraphData = {
value: data?.getWorkflowRunStats.average_resiliency_score ?? 0,
value: data?.getWorkflowRunStats.averageResiliencyScore ?? 0,
label: 'Avg Resiliency Score',
baseColor: getValueColor(
data?.getWorkflowRunStats.average_resiliency_score ?? 0,
data?.getWorkflowRunStats.averageResiliencyScore ?? 0,
resilienceScoreColourMap
),
};
@ -136,7 +136,7 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
heading={
isSingleRun
? 'Experiments'
: data?.getWorkflowRunStats.total_workflow_runs !== 1
: data?.getWorkflowRunStats.totalWorkflowRuns !== 1
? 'Runs'
: 'Run'
}
@ -194,14 +194,13 @@ const WorkflowStats: React.FC<WorkflowStatsProps> = ({
passPercentage={
showWorkflowStats
? data?.getWorkflowRunStats
.workflow_run_succeeded_percentage ?? 0
: data?.getWorkflowRunStats.passed_percentage ?? 0
.workflowRunSucceededPercentage ?? 0
: data?.getWorkflowRunStats.passedPercentage ?? 0
}
failPercentage={
showWorkflowStats
? data?.getWorkflowRunStats
.workflow_run_failed_percentage ?? 0
: data?.getWorkflowRunStats.failed_percentage ?? 0
? data?.getWorkflowRunStats.workflowRunFailedPercentage ?? 0
: data?.getWorkflowRunStats.failedPercentage ?? 0
}
/>
</div>
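For clarity, the camelCase fields the stats charts above read, gathered into one interface. The interface itself is an assumption for illustration; the field names come from the hunks, and the helper mirrors the single-run versus aggregate selection made by the component.

// Assumed response shape for getWorkflowRunStats after the rename.
interface WorkflowRunStatsResponse {
  experimentsPassed: number;
  experimentsFailed: number;
  experimentsAwaited: number;
  experimentsStopped: number;
  experimentsNa: number;
  succeededWorkflowRuns: number;
  failedWorkflowRuns: number;
  runningWorkflowRuns: number;
  totalWorkflowRuns: number;
  averageResiliencyScore: number;
  passedPercentage: number;
  failedPercentage: number;
  workflowRunSucceededPercentage: number;
  workflowRunFailedPercentage: number;
}

// Picks the three radial-chart values the way the component does:
// per-experiment counts for a single run, per-run counts otherwise.
const radialChartValues = (
  stats: WorkflowRunStatsResponse,
  isSingleRun: boolean
): number[] => [
  isSingleRun ? stats.experimentsPassed : stats.succeededWorkflowRuns,
  isSingleRun ? stats.experimentsFailed : stats.failedWorkflowRuns,
  isSingleRun ? stats.experimentsAwaited : stats.runningWorkflowRuns,
];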


@ -39,13 +39,13 @@ const InfoSection: React.FC<InfoSectionProps> = ({
<Typography>
Name :{' '}
<span className={classes.infoHint} data-cy="infoWorkflowName">
{data.ListWorkflow.workflows[0].workflow_name}
{data.listWorkflows.workflows[0].workflowName}
</span>
</Typography>
<Typography>
Id :{' '}
<span className={classes.infoHint} data-cy="infoWorkflowId">
{data.ListWorkflow.workflows[0].workflow_id}
{data.listWorkflows.workflows[0].workflowID}
</span>
</Typography>
{data && (
@ -57,7 +57,7 @@ const InfoSection: React.FC<InfoSectionProps> = ({
>
{
YAML.parse(
data?.ListWorkflow.workflows[0].workflow_manifest
data?.listWorkflows.workflows[0].workflowManifest
).metadata.labels.subject
}
</span>
@ -72,7 +72,7 @@ const InfoSection: React.FC<InfoSectionProps> = ({
>
{
YAML.parse(
data?.ListWorkflow.workflows[0].workflow_manifest
data?.listWorkflows.workflows[0].workflowManifest
).metadata.namespace
}
</span>
@ -86,13 +86,13 @@ const InfoSection: React.FC<InfoSectionProps> = ({
<Typography>
Name :{' '}
<span className={classes.infoHint} data-cy="infoAgentName">
{data.ListWorkflow.workflows[0].cluster_name}
{data.listWorkflows.workflows[0].clusterName}
</span>
</Typography>
<Typography>
Id :{' '}
<span className={classes.infoHint} data-cy="infoClusterId">
{data.ListWorkflow.workflows[0].cluster_id}
{data.listWorkflows.workflows[0].clusterID}
</span>
</Typography>
</div>
@ -105,16 +105,16 @@ const InfoSection: React.FC<InfoSectionProps> = ({
Last Run :{' '}
<span className={classes.infoHint}>
{timeDifferenceForDate(
data.ListWorkflow.workflows[0].updated_at
data.listWorkflows.workflows[0].updatedAt
)}
</span>
</Typography>
<Typography data-cy="infoWorkflowNextRun">
Next Run :{' '}
{data.ListWorkflow.workflows[0].cronSyntax ? (
{data.listWorkflows.workflows[0].cronSyntax ? (
<span className={classes.infoHint}>
{parser
.parseExpression(data.ListWorkflow.workflows[0].cronSyntax)
.parseExpression(data.listWorkflows.workflows[0].cronSyntax)
.next()
.toString()}
</span>
@ -126,13 +126,13 @@ const InfoSection: React.FC<InfoSectionProps> = ({
{/* Column 4 */}
<div className={classes.regularity} data-cy="infoWorkflowRegularity">
<Typography className={classes.infoHeader}>Regularity :</Typography>
{data.ListWorkflow.workflows[0].cronSyntax === '' ? (
{data.listWorkflows.workflows[0].cronSyntax === '' ? (
<Typography>Non cron workflow</Typography>
) : (
data.ListWorkflow.workflows[0].cronSyntax !== undefined && (
data.listWorkflows.workflows[0].cronSyntax !== undefined && (
<Typography>
{cronstrue.toString(
data.ListWorkflow.workflows[0].cronSyntax
data.listWorkflows.workflows[0].cronSyntax
)}
</Typography>
)
@ -141,8 +141,8 @@ const InfoSection: React.FC<InfoSectionProps> = ({
</div>
{showMore && (
<WorkflowStats
workflowID={data.ListWorkflow.workflows[0].workflow_id}
isCron={data.ListWorkflow.workflows[0].cronSyntax !== ''}
workflowID={data.listWorkflows.workflows[0].workflowID}
isCron={data.listWorkflows.workflows[0].cronSyntax !== ''}
noOfWorkflowRuns={workflowRunLength ?? 0}
/>
)}


@ -13,7 +13,7 @@ import Center from '../../../containers/layouts/Center';
import { WORKFLOW_DETAILS } from '../../../graphql';
import {
Workflow,
WorkflowDataVars,
WorkflowDataRequest,
} from '../../../models/graphql/workflowData';
import { getProjectID } from '../../../utils/getSearchParams';
import WorkflowRunTable from '../WorkflowRunTable';
@ -60,20 +60,20 @@ const StackedBarGraph: React.FC<StackedBarGraphProps> = ({
return '';
};
const { data, loading } = useQuery<Workflow, WorkflowDataVars>(
const { data, loading } = useQuery<Workflow, WorkflowDataRequest>(
WORKFLOW_DETAILS,
{
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_ids: [workflowID],
request: {
projectID,
workflowIDs: [workflowID],
sort: {
field: 'Time',
field: 'TIME',
},
filter: {
date_range: {
start_date: moment.unix(date).startOf('day').unix().toString(),
end_date: moment.unix(date).endOf('day').unix().toString(),
dateRange: {
startDate: moment.unix(date).startOf('day').unix().toString(),
endDate: moment.unix(date).endOf('day').unix().toString(),
},
},
},
@ -82,30 +82,30 @@ const StackedBarGraph: React.FC<StackedBarGraphProps> = ({
}
);
if (data?.getWorkflowRuns.workflow_runs) {
data.getWorkflowRuns.workflow_runs.forEach((wfrun) => {
if (data?.listWorkflowRuns.workflowRuns) {
data.listWorkflowRuns.workflowRuns.forEach((wfrun) => {
if (wfrun.phase !== 'Running') {
stackBarData.push({
id: wfrun.workflow_run_id,
date: Number(wfrun.last_updated) * 1000,
id: wfrun.workflowRunID,
date: Number(wfrun.lastUpdated) * 1000,
passPercentage:
wfrun.total_experiments &&
wfrun.experiments_passed &&
wfrun.total_experiments > 0
? (wfrun.experiments_passed * 100) / wfrun.total_experiments
wfrun.totalExperiments &&
wfrun.experimentsPassed &&
wfrun.totalExperiments > 0
? (wfrun.experimentsPassed * 100) / wfrun.totalExperiments
: 0,
failPercentage:
wfrun.total_experiments &&
!wfrun.experiments_failed &&
wfrun.total_experiments > 0
? (wfrun.experiments_failed * 100) / wfrun.total_experiments
wfrun.totalExperiments &&
!wfrun.experimentsFailed &&
wfrun.totalExperiments > 0
? (wfrun.experimentsFailed * 100) / wfrun.totalExperiments
: 0,
passCount: wfrun.experiments_passed ?? 0,
failCount: wfrun.experiments_failed ?? 0,
passCount: wfrun.experimentsPassed ?? 0,
failCount: wfrun.experimentsFailed ?? 0,
});
openSeries.data.push({
date: Number(wfrun.last_updated) * 1000,
value: wfrun.resiliency_score ?? 0,
date: Number(wfrun.lastUpdated) * 1000,
value: wfrun.resiliencyScore ?? 0,
});
}
});
@ -220,8 +220,8 @@ const StackedBarGraph: React.FC<StackedBarGraphProps> = ({
</div>
{showTable && (
<WorkflowRunTable
workflowId={workflowID}
workflowRunId={workflowRunID}
workflowID={workflowID}
workflowRunID={workflowRunID}
/>
)}
</div>


@ -14,16 +14,16 @@ import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';
import Loader from '../../../components/Loader';
import Center from '../../../containers/layouts/Center';
import { WORKFLOW_LIST_DETAILS, WORKFLOW_RUN_DETAILS } from '../../../graphql';
import { GET_WORKFLOW_DETAILS, WORKFLOW_RUN_DETAILS } from '../../../graphql';
import {
ExecutionData,
Pagination,
Workflow,
WorkflowDataVars,
WorkflowRunFilterInput,
WorkflowDataRequest,
WorkflowRunFilterRequest,
} from '../../../models/graphql/workflowData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
WeightageMap,
} from '../../../models/graphql/workflowListData';
@ -60,13 +60,13 @@ interface WorkFlowTests {
}
interface WorkflowRunTableProps {
workflowId: string;
workflowRunId: string;
workflowID: string;
workflowRunID: string;
}
const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
workflowId,
workflowRunId,
workflowID,
workflowRunID,
}) => {
// get ProjectID
const projectID = getProjectID();
@ -75,12 +75,12 @@ const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
const [wfRunData, setWfRunData] = React.useState<WorkFlowTests[]>([]);
const [dateRange, setDateRange] = React.useState<WorkflowRunFilterInput>({
workflow_name: '',
cluster_name: 'All',
workflow_status: 'All',
date_range: {
start_date: new Date(0).valueOf().toString(),
const [dateRange, setDateRange] = React.useState<WorkflowRunFilterRequest>({
workflowName: '',
clusterName: 'All',
workflowStatus: 'All',
dateRange: {
startDate: new Date(0).valueOf().toString(),
},
});
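A small sketch of the renamed filter state used here, plus the date-range conversion applied further down. The interface body and the helper are assumptions matching the fields shown in the hunks.

// Assumed shape of the filter; dateRange bounds are millisecond strings.
interface WorkflowRunFilterRequest {
  workflowName: string;
  clusterName: string;
  workflowStatus: string;
  dateRange: {
    startDate: string;
    endDate?: string;
  };
}

// Mirrors the date-range update below: start of day to end of day,
// serialised as epoch-millisecond strings.
const toDateRange = (selectStartDate: Date, selectEndDate: Date) => ({
  startDate: new Date(selectStartDate).setHours(0, 0, 0, 0).toString(),
  endDate: new Date(selectEndDate).setHours(23, 59, 59, 999).toString(),
});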
@ -142,21 +142,21 @@ const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
data: weightageDetail,
loading: loadWeightage,
error: errorWeightage,
} = useQuery<ScheduledWorkflows, ListWorkflowsInput>(WORKFLOW_LIST_DETAILS, {
} = useQuery<ScheduledWorkflows, GetWorkflowsRequest>(GET_WORKFLOW_DETAILS, {
variables: {
workflowInput: { project_id: projectID, workflow_ids: [workflowId] },
request: { projectID, workflowIDs: [workflowID] },
},
});
const { loading: loadWfRun, error: errorWfRun } = useQuery<
Workflow,
WorkflowDataVars
WorkflowDataRequest
>(WORKFLOW_RUN_DETAILS, {
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_ids: [workflowId],
workflow_run_ids: [workflowRunId],
request: {
projectID,
workflowIDs: [workflowID],
workflowRunIDs: [workflowRunID],
pagination: {
page: paginationData.page,
limit: paginationData.limit,
@ -166,9 +166,9 @@ const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
},
onCompleted: (data) => {
const workflowTestsArray: WorkFlowTests[] = [];
if (data.getWorkflowRuns.workflow_runs.length > 0) {
if (data.listWorkflowRuns.workflowRuns) {
const executionData: ExecutionData = JSON.parse(
data?.getWorkflowRuns?.workflow_runs[0]?.execution_data
data.listWorkflowRuns.workflowRuns[0].executionData
);
const { nodes } = executionData;
let index: number = 1;
@ -178,11 +178,11 @@ const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
if (node.chaosData) {
const { chaosData } = node;
const weightageMap: WeightageMap[] = weightageDetail
? weightageDetail?.ListWorkflow.workflows[0]?.weightages
? weightageDetail?.listWorkflows.workflows[0]?.weightages
: [];
/* eslint-disable no-loop-func */
weightageMap.forEach((weightage) => {
if (weightage.experiment_name === node.name) {
if (weightage.experimentName === node.name) {
workflowTestsArray.push({
test_id: index,
test_name: node.name,
@ -274,12 +274,12 @@ const WorkflowRunTable: React.FC<WorkflowRunTableProps> = ({
// Change filter value for date range
setDateRange({
...dateRange,
date_range: {
start_date: new Date(selectStartDate)
dateRange: {
startDate: new Date(selectStartDate)
.setHours(0, 0, 0)
.valueOf()
.toString(),
end_date: new Date(selectEndDate)
endDate: new Date(selectEndDate)
.setHours(23, 59, 59)
.valueOf()
.toString(),


@ -22,15 +22,18 @@ import BackButton from '../../components/Button/BackButton';
import Loader from '../../components/Loader';
import Center from '../../containers/layouts/Center';
import Wrapper from '../../containers/layouts/Wrapper';
import { WORKFLOW_LIST_DETAILS } from '../../graphql/queries';
import {
GET_WORKFLOW_DETAILS,
WORKFLOW_RUN_DETAILS,
} from '../../graphql/queries';
import {
HeatmapDataRequest,
HeatmapDataResponse,
HeatmapDataVars,
Workflow,
WorkflowDataVars,
WorkflowDataRequest,
} from '../../models/graphql/workflowData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
} from '../../models/graphql/workflowListData';
import { history } from '../../redux/configureStore';
@ -55,9 +58,9 @@ const TestCalendarHeatmapTooltip = ({
{tooltipData?.data?.bin?.bin.value ?? 0}% Average Resiliency
</div>
<div>
{tooltipData?.data?.bin?.bin.workflowRunDetail.no_of_runs ?? 0}{' '}
completed runs on{' '}
{formatDate(tooltipData?.data?.bin?.bin.workflowRunDetail.date_stamp) ??
{tooltipData?.data?.bin?.bin.workflowRunDetail.noOfRuns ?? 0} completed
runs on{' '}
{formatDate(tooltipData?.data?.bin?.bin.workflowRunDetail.dateStamp) ??
''}
</div>
</div>
@ -65,7 +68,7 @@ const TestCalendarHeatmapTooltip = ({
};
interface URLParams {
workflowId: string;
workflowID: string;
}
const valueThreshold = [13, 26, 39, 49, 59, 69, 79, 89, 100];
@ -76,44 +79,36 @@ const WorkflowInfoStats: React.FC = () => {
const { t } = useTranslation();
const theme = useTheme();
const { workflowId }: URLParams = useParams();
const { workflowID }: URLParams = useParams();
// Keep track of whether workflow has run or not
const [hasWorkflowRun, setHasWorkflowRun] = useState<boolean>(true);
// Apollo query to get the scheduled workflow data
const { data } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS,
const { data } = useQuery<ScheduledWorkflows, GetWorkflowsRequest>(
GET_WORKFLOW_DETAILS,
{
variables: {
workflowInput: { project_id: projectID, workflow_ids: [workflowId] },
request: { projectID, workflowIDs: [workflowID] },
},
fetchPolicy: 'cache-and-network',
}
);
const { data: workflowRunData } = useQuery<Workflow, WorkflowDataVars>(
gql`
query workflowDetails($workflowRunsInput: GetWorkflowRunsInput!) {
getWorkflowRuns(workflowRunsInput: $workflowRunsInput) {
total_no_of_workflow_runs
workflow_runs {
workflow_run_id
}
}
}
`,
// TODO: shift out
const { data: workflowRunData } = useQuery<Workflow, WorkflowDataRequest>(
WORKFLOW_RUN_DETAILS,
{
variables: {
workflowRunsInput: {
project_id: projectID,
workflow_ids: [workflowId],
request: {
projectID,
workflowIDs: [workflowID],
},
},
onCompleted: () => {
setHasWorkflowRun(
workflowRunData !== undefined &&
workflowRunData.getWorkflowRuns.total_no_of_workflow_runs > 0
workflowRunData.listWorkflowRuns.totalNoOfWorkflowRuns > 0
);
},
fetchPolicy: 'cache-and-network',
@ -121,7 +116,7 @@ const WorkflowInfoStats: React.FC = () => {
);
const workflowRunID =
workflowRunData?.getWorkflowRuns?.workflow_runs[0]?.workflow_run_id ?? '';
workflowRunData?.listWorkflowRuns?.workflowRuns[0]?.workflowRunID ?? '';
const presentYear = new Date().getFullYear();
const [showTable, setShowTable] = useState<boolean>(false);
@ -136,27 +131,28 @@ const WorkflowInfoStats: React.FC = () => {
setShowTable(false);
};
// TODO: shift out
// Apollo query to get the heatmap data
const { data: heatmapData, loading } = useQuery<
HeatmapDataResponse,
HeatmapDataVars
HeatmapDataRequest
>(
gql`
query getHeatmapData(
$project_id: String!
$workflow_id: String!
query listHeatmapData(
$projectID: String!
$workflowID: String!
$year: Int!
) {
getHeatmapData(
project_id: $project_id
workflow_id: $workflow_id
listHeatmapData(
projectID: $projectID
workflowID: $workflowID
year: $year
) {
bins {
value
workflowRunDetail {
no_of_runs
date_stamp
noOfRuns
dateStamp
}
}
}
@ -164,8 +160,8 @@ const WorkflowInfoStats: React.FC = () => {
`,
{
variables: {
project_id: projectID,
workflow_id: workflowId,
projectID,
workflowID,
year,
},
fetchPolicy: 'cache-and-network',
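Assumed request and response shapes for the inline listHeatmapData query above, written out for reference. Only the field names are taken from the query; the types and the array nesting are illustrative guesses based on how the data is consumed below.

interface HeatmapDataRequest {
  projectID: string;
  workflowID: string;
  year: number;
}

interface WorkflowRunDetail {
  noOfRuns: number;
  dateStamp: number; // assumed numeric timestamp consumed by formatDate
}

interface HeatmapBin {
  value: number;
  workflowRunDetail: WorkflowRunDetail;
}

interface HeatmapDataResponse {
  // nesting assumed from `heatmapData?.listHeatmapData ?? []` below
  listHeatmapData: { bins: HeatmapBin[] }[];
}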
@ -215,7 +211,7 @@ const WorkflowInfoStats: React.FC = () => {
<div className={classes.headingSection}>
<div className={classes.pageHeading}>
<Typography className={classes.heading} data-cy="statsWorkflowName">
{data?.ListWorkflow.workflows[0].workflow_name}
{data?.listWorkflows.workflows[0].workflowName}
</Typography>
<Typography className={classes.subHeading}>
Here's the statistics of the selected workflow
@ -232,16 +228,16 @@ const WorkflowInfoStats: React.FC = () => {
<InfoSection
data={data}
workflowRunLength={
workflowRunData.getWorkflowRuns.total_no_of_workflow_runs
workflowRunData.listWorkflowRuns.totalNoOfWorkflowRuns
}
/>
)}
{/* Visualization Area */}
{/* Check for cron workflow OR single workflow which has been re-run */}
{data?.ListWorkflow.workflows[0].cronSyntax !== '' ||
(workflowRunData?.getWorkflowRuns.total_no_of_workflow_runs &&
workflowRunData?.getWorkflowRuns.total_no_of_workflow_runs > 1) ? (
{data?.listWorkflows.workflows[0].cronSyntax !== '' ||
(workflowRunData?.listWorkflowRuns.totalNoOfWorkflowRuns &&
workflowRunData?.listWorkflowRuns.totalNoOfWorkflowRuns > 1) ? (
<div className={classes.heatmapArea}>
<div className={classes.heatmapAreaHeading}>
<Typography className={classes.sectionHeading}>
@ -253,7 +249,7 @@ const WorkflowInfoStats: React.FC = () => {
<div className={classes.formControlParent}>
<Typography>
Total runs till date:{' '}
{workflowRunData?.getWorkflowRuns.total_no_of_workflow_runs}
{workflowRunData?.listWorkflowRuns.totalNoOfWorkflowRuns}
</Typography>
<FormControl
className={classes.formControl}
@ -285,12 +281,12 @@ const WorkflowInfoStats: React.FC = () => {
) : (
<div className={classes.heatmapParent} data-cy="statsHeatMap">
<CalendarHeatmap
calendarHeatmapMetric={heatmapData?.getHeatmapData ?? []}
calendarHeatmapMetric={heatmapData?.listHeatmapData ?? []}
valueThreshold={valueThreshold}
CalendarHeatmapTooltip={TestCalendarHeatmapTooltip}
handleBinClick={(bin: any) => {
if (bin) {
if (bin?.bin?.workflowRunDetail.no_of_runs === 0) {
if (bin?.bin?.workflowRunDetail.noOfRuns === 0) {
setDataCheck(true);
setShowStackBar(false);
handleTableClose();
@ -299,7 +295,7 @@ const WorkflowInfoStats: React.FC = () => {
handleTableClose();
setBinResiliencyScore(bin.bin.value);
setWorkflowRunDate(
bin.bin.workflowRunDetail.date_stamp
bin.bin.workflowRunDetail.dateStamp
);
}
} else {
@ -329,7 +325,7 @@ const WorkflowInfoStats: React.FC = () => {
<StackedBarGraph
date={workflowRunDate}
averageResiliency={binResiliencyScore}
workflowID={workflowId}
workflowID={workflowID}
handleTableOpen={handleTableOpen}
handleTableClose={handleTableClose}
showTable={showTable}
@ -345,8 +341,8 @@ const WorkflowInfoStats: React.FC = () => {
</div>
) : (
<WorkflowRunTable
workflowId={workflowId}
workflowRunId={workflowRunID}
workflowID={workflowID}
workflowRunID={workflowRunID}
/>
)}
</Wrapper>


@ -1,24 +1,24 @@
import {
HubDetails,
MyHubAction,
MyHubActions,
HubDetails,
} from '../../models/redux/myhub';
import createReducer from './createReducer';
const initialState: HubDetails = {
id: '',
HubName: '',
RepoURL: '',
RepoBranch: '',
TotalExp: '',
IsAvailable: true,
IsPrivate: false,
Token: '',
UserName: '',
Password: '',
SSHPrivateKey: '',
SSHPublicKey: '',
LastSyncedAt: '',
hubName: '',
repoURL: '',
repoBranch: '',
totalExp: '',
isAvailable: true,
isPrivate: false,
token: '',
userName: '',
password: '',
sshPrivateKey: '',
sshPublicKey: '',
lastSyncedAt: '',
};
export const hubDetails = createReducer<HubDetails>(initialState, {


@ -10,7 +10,7 @@ const initialState: SelectedNode = {
finishedAt: '',
message: '',
name: '',
pod_name: '',
podName: '',
phase: '',
startedAt: '',
type: '',


@ -9,7 +9,7 @@ import createReducer from './createReducer';
const initialState: WorkflowData = {
chaosEngineChanged: false,
namespace: '',
clusterid: '',
clusterID: '',
cronSyntax: '',
scheduleType: {
scheduleOnce: 'now',
@ -22,7 +22,7 @@ const initialState: WorkflowData = {
time: new Date(),
date: new Date(),
},
clustername: '',
clusterName: '',
};
const init: WorkflowManifest = {


@ -1,3 +1,4 @@
/* eslint-disable no-unused-expressions */
import {
ParsedChaosEventPrometheusData,
ParsedMetricPrometheusData,
@ -46,12 +47,12 @@ export const getDashboardQueryMap = (panelGroups: PanelGroupResponse[]) => {
const queryMapPanel: queryMapForPanel[] = [];
panelGroup.panels.forEach((panel) => {
queryMapPanel.push({
panelID: panel.panel_id,
queryIDs: panel.prom_queries.map((query) => query.queryid),
panelID: panel.panelID,
queryIDs: panel.promQueries.map((query) => query.queryID),
});
});
queryMapPanelGroup.push({
panelGroupID: panelGroup.panel_group_id,
panelGroupID: panelGroup.panelGroupID,
panelQueryMap: queryMapPanel,
});
});
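A compact sketch of the same mapping getDashboardQueryMap performs above, expressed with the renamed camelCase fields. The interface names are capitalised locally for the example and the parameter type is an assumption; the field names match the hunk.

interface QueryMapForPanel {
  panelID: string;
  queryIDs: string[];
}
interface QueryMapForPanelGroup {
  panelGroupID: string;
  panelQueryMap: QueryMapForPanel[];
}

// Equivalent of the nested forEach above, written as a map.
const buildQueryMap = (
  panelGroups: {
    panelGroupID: string;
    panels: { panelID: string; promQueries: { queryID: string }[] }[];
  }[]
): QueryMapForPanelGroup[] =>
  panelGroups.map((group) => ({
    panelGroupID: group.panelGroupID,
    panelQueryMap: group.panels.map((panel) => ({
      panelID: panel.panelID,
      queryIDs: panel.promQueries.map((query) => query.queryID),
    })),
  }));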
@ -91,8 +92,8 @@ export const getPromQueryInput = (
const promQueries: promQueryInput[] = [];
prom_queries.forEach((query: PromQueryDetails) => {
promQueries.push({
queryid: query.queryid,
query: query.prom_query_name,
queryID: query.queryID,
query: query.promQueryName,
legend: query.legend,
resolution: query.resolution,
minstep:
@ -104,7 +105,7 @@ export const getPromQueryInput = (
});
if (withEvents && eventQueryTemplate && verdictQueryTemplate) {
promQueries.push({
queryid: DEFAULT_CHAOS_EVENT_QUERY_ID,
queryID: DEFAULT_CHAOS_EVENT_QUERY_ID,
query: eventQueryTemplate,
legend: DEFAULT_CHAOS_EVENT_AND_VERDICT_PROMETHEUS_QUERY_LEGEND,
resolution: DEFAULT_CHAOS_EVENT_AND_VERDICT_PROMETHEUS_QUERY_RESOLUTION,
@ -118,7 +119,7 @@ export const getPromQueryInput = (
),
});
promQueries.push({
queryid: DEFAULT_CHAOS_VERDICT_QUERY_ID,
queryID: DEFAULT_CHAOS_VERDICT_QUERY_ID,
query: verdictQueryTemplate,
legend: DEFAULT_CHAOS_EVENT_AND_VERDICT_PROMETHEUS_QUERY_LEGEND,
resolution: DEFAULT_CHAOS_EVENT_AND_VERDICT_PROMETHEUS_QUERY_RESOLUTION,
@ -148,7 +149,7 @@ export const generatePromQueries = (
const promQueries: promQueryInput[] = getPromQueryInput(
dashboardMetaPanelGroups
.flatMap((panelGroup) => (panelGroup ? panelGroup.panels ?? [] : []))
.flatMap((panel) => (panel ? panel.prom_queries ?? [] : [])),
.flatMap((panel) => (panel ? panel.promQueries ?? [] : [])),
timeRangeDiff,
true,
chaosEventQueryTemplate,
@ -168,7 +169,7 @@ export const MetricDataParserForPrometheus = (
seriesData: [],
closedAreaData: [],
};
metricData.forEach((queryResponse, mainIndex) => {
metricData?.forEach((queryResponse, mainIndex) => {
if (queryResponse && queryResponse.legends && queryResponse.tsvs) {
let { legends } = queryResponse;
let { tsvs } = queryResponse;
@ -300,7 +301,7 @@ export const DashboardMetricDataParserForPrometheus = (
selectedApplications?: string[]
) => {
const mappedData: QueryMapForPanelGroup[] = [];
metricData.forEach((panelGroupData, panelGroupIndex) => {
metricData?.forEach((panelGroupData, panelGroupIndex) => {
mappedData.push({
panelGroupID: panelGroupData.panelGroupID,
metricDataForGroup: [],


@ -47,11 +47,11 @@ const SaveTemplateModal: React.FC<SaveTemplateModalProps> = ({
ADD_WORKFLOW_TEMPLATE,
{
variables: {
data: {
request: {
manifest: editManifest,
template_name: templateName,
template_description: templateDesc,
project_id: getProjectID(),
templateName,
templateDescription: templateDesc,
projectID: getProjectID(),
isCustomWorkflow,
},
},


@ -134,7 +134,7 @@ const TableData: React.FC<TableDataProps> = ({
const editSchedule = () => {
history.push({
pathname: `/workflows/schedule/${data.project_id}/${data.workflow_name}`,
pathname: `/workflows/schedule/${data.projectID}/${data.workflowName}`,
search: `?projectID=${projectID}&projectRole=${projectRole}`,
});
};
@ -153,7 +153,7 @@ const TableData: React.FC<TableDataProps> = ({
reRunChaosWorkFlow({
variables: {
projectID: getProjectID(),
data: data.workflow_id,
workflowID: data.workflowID,
},
});
};
@ -201,12 +201,12 @@ const TableData: React.FC<TableDataProps> = ({
<Typography>
<span
className={`${classes.boldText} ${
YAML.parse(data.workflow_manifest).spec.suspend === true
YAML.parse(data.workflowManifest).spec.suspend === true
? classes.dark
: ''
}`}
>
{data.workflow_name}
{data.workflowName}
</span>
</Typography>
</TableCell>
@ -214,18 +214,18 @@ const TableData: React.FC<TableDataProps> = ({
<Typography className={classes.clusterData}>
<span
className={
YAML.parse(data.workflow_manifest).spec.suspend === true
YAML.parse(data.workflowManifest).spec.suspend === true
? classes.dark
: ''
}
>
{data.cluster_name}
{data.clusterName}
</span>
</Typography>
</TableCell>
<TableCell>
<Typography className={classes.clusterData}>
<span>{data.last_updated_by || '-'}</span>
<span>{data.lastUpdatedBy || '-'}</span>
</Typography>
</TableCell>
<TableCell>
@ -235,7 +235,7 @@ const TableData: React.FC<TableDataProps> = ({
>
<span
className={
YAML.parse(data.workflow_manifest).spec.suspend === true
YAML.parse(data.workflowManifest).spec.suspend === true
? classes.dark
: ''
}
@ -265,9 +265,9 @@ const TableData: React.FC<TableDataProps> = ({
<div className={classes.weightDiv}>
{data.weightages.map((expData) => {
return (
<div key={expData.experiment_name} style={{ marginBottom: 8 }}>
<div key={expData.experimentName} style={{ marginBottom: 8 }}>
<ExperimentPoints
expName={expData.experiment_name}
expName={expData.experimentName}
weight={expData.weightage}
/>
</div>
@ -283,7 +283,7 @@ const TableData: React.FC<TableDataProps> = ({
>
<span
className={
YAML.parse(data.workflow_manifest).spec.suspend === true
YAML.parse(data.workflowManifest).spec.suspend === true
? classes.dark
: ''
}
@ -320,7 +320,7 @@ const TableData: React.FC<TableDataProps> = ({
{t('chaosWorkflows.browseSchedules.startingDate')} :
</span>
<span className={classes.scheduleDetailsValue}>
{formatDate(data.created_at)}
{formatDate(data.createdAt)}
</span>
</Typography>
<Typography className={classes.scheduleDetailsFlex}>
@ -328,7 +328,7 @@ const TableData: React.FC<TableDataProps> = ({
{t('chaosWorkflows.browseSchedules.lastUpdated')} :
</span>
<span className={classes.scheduleDetailsValue}>
{timeDifferenceForDate(data.updated_at)}
{timeDifferenceForDate(data.updatedAt)}
</span>
</Typography>
<Typography className={classes.scheduleDetailsFlex}>
@ -347,12 +347,12 @@ const TableData: React.FC<TableDataProps> = ({
<TableCell>
<span
className={
YAML.parse(data.workflow_manifest).spec.suspend === true
YAML.parse(data.workflowManifest).spec.suspend === true
? classes.dark
: ''
}
>
{YAML.parse(data.workflow_manifest).spec.suspend === true ? (
{YAML.parse(data.workflowManifest).spec.suspend === true ? (
<Typography>
{t('chaosWorkflows.browseSchedules.scheduleIsDisabled')}
</Typography>
@ -374,7 +374,7 @@ const TableData: React.FC<TableDataProps> = ({
<IconButton
onClick={() => {
tabs.changeWorkflowsTabs(0);
setWorkflowName(data.workflow_name);
setWorkflowName(data.workflowName);
}}
data-cy="showSchedules"
>
@ -450,7 +450,7 @@ const TableData: React.FC<TableDataProps> = ({
</Snackbar>
{projectRole !== 'Viewer' &&
data.cronSyntax !== '' &&
YAML.parse(data.workflow_manifest).spec.suspend !== true && (
YAML.parse(data.workflowManifest).spec.suspend !== true && (
<MenuItem
value="Disable"
onClick={() => {
@ -474,7 +474,7 @@ const TableData: React.FC<TableDataProps> = ({
)}
{projectRole !== 'Viewer' &&
YAML.parse(data.workflow_manifest).spec.suspend === true && (
YAML.parse(data.workflowManifest).spec.suspend === true && (
<MenuItem
value="Enable"
onClick={() => {
@ -499,7 +499,7 @@ const TableData: React.FC<TableDataProps> = ({
<MenuItem
value="Download"
onClick={() =>
downloadYAML(data.workflow_manifest, data.workflow_name)
downloadYAML(data.workflowManifest, data.workflowName)
}
>
<div className={classes.expDiv}>
@ -515,7 +515,7 @@ const TableData: React.FC<TableDataProps> = ({
<MenuItem
value="SaveTemplate"
data-cy="saveTemplate"
onClick={() => handleSaveWorkflowTemplate(data.workflow_manifest)}
onClick={() => handleSaveWorkflowTemplate(data.workflowManifest)}
>
<div className={classes.expDiv}>
<InsertDriveFileOutlined className={classes.downloadBtn} />
@ -570,7 +570,7 @@ const TableData: React.FC<TableDataProps> = ({
variant="error"
className={classes.w7}
onClick={() => {
deleteRow(data.workflow_id);
deleteRow(data.workflowID);
setIsModalOpen(false);
}}
>


@ -27,19 +27,19 @@ import Loader from '../../../components/Loader';
import {
DELETE_WORKFLOW,
GET_CLUSTER_NAMES,
GET_WORKFLOW_DETAILS,
UPDATE_SCHEDULE,
WORKFLOW_LIST_DETAILS,
} from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import { ClusterRequest, Clusters } from '../../../models/graphql/clusterData';
import { WeightMap } from '../../../models/graphql/createWorkflowData';
import { DeleteSchedule } from '../../../models/graphql/scheduleData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
Pagination,
ScheduledWorkflow,
ScheduledWorkflows,
SortInput,
WorkflowFilterInput,
SortRequest,
WorkflowFilterRequest,
} from '../../../models/graphql/workflowListData';
import { getProjectID } from '../../../utils/getSearchParams';
import useStyles from './styles';
@ -49,9 +49,9 @@ interface BrowseScheduleProps {
setWorkflowName: React.Dispatch<React.SetStateAction<string>>;
}
interface FilterOption extends WorkflowFilterInput {
interface FilterOption extends WorkflowFilterRequest {
suspended?: string;
workflow_type?: string;
workflowType?: string;
}
const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
@ -67,34 +67,34 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
// States for filters
const [filters, setFilters] = useState<FilterOption>({
workflow_name: '',
cluster_name: 'All',
workflowName: '',
clusterName: 'All',
suspended: 'All',
workflow_type: 'All',
workflowType: 'All',
});
// State for sorting
const [sortData, setSortData] = useState<SortInput>({
field: 'Time',
const [sortData, setSortData] = useState<SortRequest>({
field: 'TIME',
descending: true,
});
// Apollo query to get the scheduled data
const { data, refetch, loading, error } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
GetWorkflowsRequest
>(GET_WORKFLOW_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
request: {
projectID,
pagination: {
page: paginationData.page,
limit: paginationData.limit,
},
sort: sortData,
filter: {
workflow_name: filters.workflow_name,
cluster_name: filters.cluster_name,
workflowName: filters.workflowName,
clusterName: filters.clusterName,
},
},
},
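A minimal sketch of the schedules query under the new convention, written as a hypothetical useSchedules hook (only the imported names come from the PR; pagination and filter values are placeholders). The old workflowInput wrapper becomes request, sort fields are upper-cased, and every key is camelCase:

import { useQuery } from '@apollo/client';
import { GET_WORKFLOW_DETAILS } from '../../../graphql';
import {
  GetWorkflowsRequest,
  ScheduledWorkflows,
} from '../../../models/graphql/workflowListData';

// Hypothetical helper hook; mirrors the query above.
const useSchedules = (projectID: string) =>
  useQuery<ScheduledWorkflows, GetWorkflowsRequest>(GET_WORKFLOW_DETAILS, {
    variables: {
      request: {
        projectID,
        pagination: { page: 0, limit: 10 },
        sort: { field: 'TIME', descending: true },
        filter: { workflowName: '', clusterName: 'All' },
      },
    },
  });

// Usage: useSchedules(projectID).data?.listWorkflows.workflows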
@ -114,7 +114,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
// Disable and re-enable a schedule
const handleToggleSchedule = (schedule: ScheduledWorkflow) => {
const yaml = YAML.parse(schedule.workflow_manifest);
const yaml = YAML.parse(schedule.workflowManifest);
if (yaml.spec.suspend === undefined || yaml.spec.suspend === false) {
yaml.spec.suspend = true;
} else {
@ -125,7 +125,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
schedule.weightages.forEach((weightEntry) => {
weightData.push({
experiment_name: weightEntry.experiment_name,
experimentName: weightEntry.experimentName,
weightage: weightEntry.weightage,
});
});
@ -133,14 +133,14 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
updateSchedule({
variables: {
ChaosWorkFlowInput: {
workflow_id: schedule.workflow_id,
workflow_name: schedule.workflow_name,
workflow_description: schedule.workflow_description,
workflowID: schedule.workflowID,
workflowName: schedule.workflowName,
workflowDescription: schedule.workflowDescription,
isCustomWorkflow: schedule.isCustomWorkflow,
cronSyntax: schedule.cronSyntax,
workflow_manifest: JSON.stringify(yaml, null, 2),
project_id: schedule.project_id,
cluster_id: schedule.cluster_id,
workflowManifest: JSON.stringify(yaml, null, 2),
projectID: schedule.projectID,
clusterID: schedule.clusterID,
weightages: weightData,
},
},
@ -148,31 +148,31 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
};
// Query to get list of Clusters
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterRequest>(
GET_CLUSTER_NAMES,
{
variables: {
project_id: projectID,
projectID,
},
}
);
const filteredWorkflows = data?.ListWorkflow.workflows
const filteredWorkflows = data?.listWorkflows.workflows
.filter((dataRow) =>
filters.suspended === 'All'
? true
: filters.suspended === 'true'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === true
? YAML.parse(dataRow.workflowManifest).spec.suspend === true
: filters.suspended === 'false'
? YAML.parse(dataRow.workflow_manifest).spec.suspend === undefined
? YAML.parse(dataRow.workflowManifest).spec.suspend === undefined
: false
)
.filter((dataRow) =>
filters.workflow_type === 'All'
filters.workflowType === 'All'
? true
: filters.workflow_type === 'workflow'
: filters.workflowType === 'workflow'
? dataRow.cronSyntax.length === 0 || dataRow.cronSyntax === ''
: filters.workflow_type === 'cronworkflow'
: filters.workflowType === 'cronworkflow'
? dataRow.cronSyntax.length > 0
: false
);
@ -182,7 +182,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
variables: {
projectID: getProjectID(),
workflowID: wfid,
workflow_run_id: '',
workflowRunID: '',
},
});
};
@ -195,11 +195,11 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
id="input-with-icon-adornment"
placeholder="Search"
className={classes.search}
value={filters.workflow_name}
value={filters.workflowName}
onChange={(event) =>
setFilters({
...filters,
workflow_name: event.target.value as string,
workflowName: event.target.value as string,
})
}
startAdornment={
@ -214,11 +214,11 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
Schedule Type
</InputLabel>
<Select
value={filters.workflow_type}
value={filters.workflowType}
onChange={(event) =>
setFilters({
...filters,
workflow_type: event.target.value as string,
workflowType: event.target.value as string,
})
}
label="Schedule Type"
@ -267,23 +267,20 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
<FormControl variant="outlined" className={classes.formControl}>
<InputLabel className={classes.selectText}>Target Agent</InputLabel>
<Select
value={filters.cluster_name}
value={filters.clusterName}
onChange={(event) =>
setFilters({
...filters,
cluster_name: event.target.value as string,
clusterName: event.target.value as string,
})
}
label="Target Cluster"
className={classes.selectText}
>
<MenuItem value="All">All</MenuItem>
{clusterList?.getCluster?.map((cluster) => (
<MenuItem
key={cluster.cluster_name}
value={cluster.cluster_name}
>
{cluster.cluster_name}
{clusterList?.listClusters?.map((cluster) => (
<MenuItem key={cluster.clusterName} value={cluster.clusterName}>
{cluster.clusterName}
</MenuItem>
))}
</Select>
@ -311,7 +308,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
size="small"
onClick={() =>
setSortData({
field: 'Name',
field: 'NAME',
descending: false,
})
}
@ -323,7 +320,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
size="small"
onClick={() =>
setSortData({
field: 'Name',
field: 'NAME',
descending: true,
})
}
@ -394,7 +391,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
filteredWorkflows.map((data) => (
<TableRow
data-cy="workflowSchedulesTableRow"
key={data.workflow_id}
key={data.workflowID}
>
<TableData
data={data}
@ -419,7 +416,7 @@ const BrowseSchedule: React.FC<BrowseScheduleProps> = ({ setWorkflowName }) => {
<TablePagination
rowsPerPageOptions={[5, 10, 25]}
component="div"
count={data?.ListWorkflow.total_no_of_workflows ?? 0}
count={data?.listWorkflows.totalNoOfWorkflows ?? 0}
rowsPerPage={paginationData.limit}
page={paginationData.page}
onChangePage={(_, page) =>

View File

@ -127,9 +127,9 @@ const HeaderSection: React.FC<HeaderSectionProps> = ({
className={classes.selectText}
>
<MenuItem value="All">All</MenuItem>
{clusterList?.getCluster?.map((cluster) => (
<MenuItem key={cluster.cluster_name} value={cluster.cluster_name}>
{cluster.cluster_name}
{clusterList?.listClusters?.map((cluster) => (
<MenuItem key={cluster.clusterName} value={cluster.clusterName}>
{cluster.clusterName}
</MenuItem>
))}
</Select>

View File

@ -1,35 +1,35 @@
import { useQuery } from '@apollo/client';
import { Typography } from '@material-ui/core';
import React from 'react';
import YAML from 'yaml';
import { useTranslation } from 'react-i18next';
import YAML from 'yaml';
import Loader from '../../../components/Loader';
import YamlEditor from '../../../components/YamlEditor/Editor';
import { WORKFLOW_LIST_DETAILS } from '../../../graphql';
import { GET_WORKFLOW_DETAILS } from '../../../graphql';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
} from '../../../models/graphql/workflowListData';
import useStyles from './styles';
interface ManifestModalProps {
project_id: string;
workflow_id: string | undefined;
projectID: string;
workflowID: string | undefined;
}
const ManifestModal: React.FC<ManifestModalProps> = ({
project_id,
workflow_id,
projectID,
workflowID,
}) => {
const classes = useStyles();
const { t } = useTranslation();
const { data, loading } = useQuery<ScheduledWorkflows, ListWorkflowsInput>(
WORKFLOW_LIST_DETAILS,
const { data, loading } = useQuery<ScheduledWorkflows, GetWorkflowsRequest>(
GET_WORKFLOW_DETAILS,
{
variables: {
workflowInput: {
project_id,
workflow_ids: [workflow_id ?? ''],
request: {
projectID,
workflowIDs: [workflowID ?? ''],
},
},
}
@ -47,7 +47,7 @@ const ManifestModal: React.FC<ManifestModalProps> = ({
<YamlEditor
content={YAML.stringify(
YAML.parse(
data?.ListWorkflow.workflows[0].workflow_manifest as string
data?.listWorkflows.workflows[0].workflowManifest as string
)
)}
filename="Workflow Template"

View File

@ -25,13 +25,13 @@ import { useTranslation } from 'react-i18next';
import TimePopOver from '../../../components/TimePopOver';
import {
DELETE_WORKFLOW,
GET_WORKFLOW_DETAILS,
SYNC_WORKFLOW,
TERMINATE_WORKFLOW,
WORKFLOW_LIST_DETAILS,
} from '../../../graphql';
import { WorkflowRun } from '../../../models/graphql/workflowData';
import {
ListWorkflowsInput,
GetWorkflowsRequest,
ScheduledWorkflows,
} from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions';
@ -74,12 +74,12 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
const { data: scheduledWorkflowData } = useQuery<
ScheduledWorkflows,
ListWorkflowsInput
>(WORKFLOW_LIST_DETAILS, {
GetWorkflowsRequest
>(GET_WORKFLOW_DETAILS, {
variables: {
workflowInput: {
project_id: projectID,
workflow_ids: [data.workflow_id ?? ''],
request: {
projectID,
workflowIDs: [data.workflowID ?? ''],
},
},
});
@ -184,7 +184,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
<>
{/* Table cell for warning (if the workflow is in running state from 20 mins) */}
<TableCell className={classes.warningTableCell}>
{timeDiff(new Date().getTime(), data.last_updated ?? '') >= 20 &&
{timeDiff(new Date().getTime(), data.lastUpdated ?? '') >= 20 &&
data.phase?.toLowerCase() === 'running' ? (
<IconButton onClick={handleWarningPopOverClick}>
<img src="./icons/warning.svg" alt="warning" width="20" />
@ -220,7 +220,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
<Typography className={classes.runningText}>
{t('chaosWorkflows.browseWorkflows.runningFrom')}{' '}
{Math.round(
timeDiff(new Date().getTime(), data.last_updated ?? '')
timeDiff(new Date().getTime(), data.lastUpdated ?? '')
)}{' '}
{t('chaosWorkflows.browseWorkflows.min')}
</Typography>
@ -233,8 +233,8 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
syncWorkflow({
variables: {
projectID: getProjectID(),
workflowID: data.workflow_id,
workflow_run_id: data.workflow_run_id,
workflowID: data.workflowID,
workflowRunID: data.workflowRunID,
},
});
}}
@ -249,8 +249,8 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
deleteWorkflow({
variables: {
projectID: getProjectID(),
workflowID: data.workflow_id,
workflow_run_id: data.workflow_run_id,
workflowID: data.workflowID,
workflowRunID: data.workflowRunID,
},
});
}}
@ -278,43 +278,43 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
style={{ cursor: 'pointer' }}
onClick={() => {
nodeSelection.selectNode({
pod_name: '',
podName: '',
});
history.push({
pathname: `/workflows/${data.workflow_run_id}`,
pathname: `/workflows/${data.workflowRunID}`,
search: `?projectID=${projectID}&projectRole=${projectRole}`,
});
}}
>
<Typography className={classes.boldText} data-cy="workflowName">
{data.workflow_name}
{data.workflowName}
</Typography>
</TableCell>
<TableCell>
<Typography className={classes.clusterName}>
{nameCapitalized(data.cluster_name ?? '')}
{nameCapitalized(data.clusterName ?? '')}
</Typography>
</TableCell>
<TableCell className={classes.reliabiltyData}>
{scheduledWorkflowData?.ListWorkflow.workflows[0]?.weightages[0]
?.experiment_name !== '' ? (
{scheduledWorkflowData?.listWorkflows.workflows[0]?.weightages[0]
?.experimentName !== '' ? (
<>
<Typography data-cy="ResScore">
<span>
{t('chaosWorkflows.browseWorkflows.tableData.overallRR')}
</span>
{data.resiliency_score === undefined ||
data.resiliency_score === null ? (
{data.resiliencyScore === undefined ||
data.resiliencyScore === null ? (
<span className={classes.less}>
{t('chaosWorkflows.browseWorkflows.tableData.na')}
</span>
) : (
<span
className={`${classes.boldText} ${getResiliencyScoreColor(
data.resiliency_score
data.resiliencyScore
)}`}
>
{data.resiliency_score}%
{data.resiliencyScore}%
</span>
)}
</Typography>
@ -324,23 +324,23 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
'chaosWorkflows.browseWorkflows.tableData.experimentsPassed'
)}
</span>
{data.experiments_passed === undefined ||
data.experiments_passed === null ||
data.total_experiments === undefined ||
data.total_experiments === null ||
data.total_experiments === 0 ||
data.resiliency_score === undefined ||
data.resiliency_score === null ? (
{data.experimentsPassed === undefined ||
data.experimentsPassed === null ||
data.totalExperiments === undefined ||
data.totalExperiments === null ||
data.totalExperiments === 0 ||
data.resiliencyScore === undefined ||
data.resiliencyScore === null ? (
<span className={classes.less}>
{t('chaosWorkflows.browseWorkflows.tableData.na')}
</span>
) : (
<span
className={`${classes.boldText} ${getResiliencyScoreColor(
data.resiliency_score
data.resiliencyScore
)}`}
>
{data.experiments_passed}/{data.total_experiments}
{data.experimentsPassed}/{data.totalExperiments}
</span>
)}
</Typography>
@ -353,8 +353,8 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
</TableCell>
<TableCell>
<div>
{scheduledWorkflowData?.ListWorkflow.workflows[0]?.weightages[0]
?.experiment_name !== '' ? (
{scheduledWorkflowData?.listWorkflows.workflows[0]?.weightages[0]
?.experimentName !== '' ? (
<>
<Button
onClick={handlePopOverClick}
@ -365,7 +365,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
'chaosWorkflows.browseWorkflows.tableData.showExperiments'
)}
(
{scheduledWorkflowData?.ListWorkflow.workflows[0]?.weightages
{scheduledWorkflowData?.listWorkflows.workflows[0]?.weightages
.length ?? 0}
)
</Typography>
@ -392,14 +392,14 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
}}
>
<div className={classes.popover}>
{scheduledWorkflowData?.ListWorkflow.workflows[0]?.weightages.map(
{scheduledWorkflowData?.listWorkflows.workflows[0]?.weightages.map(
(weightEntry) => (
<div
key={weightEntry.experiment_name}
key={weightEntry.experimentName}
style={{ marginBottom: 8 }}
>
<ExperimentPoints
expName={weightEntry.experiment_name}
expName={weightEntry.experimentName}
weight={weightEntry.weightage}
/>
</div>
@ -416,11 +416,11 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
</div>
</TableCell>
<TableCell>
<TimePopOver unixTime={data.last_updated ?? ''} />
<TimePopOver unixTime={data.lastUpdated ?? ''} />
</TableCell>
<TableCell>
<Typography className={classes.executedBy}>
{data.executed_by || '-'}
{data.executedBy || '-'}
</Typography>
</TableCell>
<TableCell>
@ -445,10 +445,10 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
value="Workflow"
onClick={() => {
nodeSelection.selectNode({
pod_name: '',
podName: '',
});
history.push({
pathname: `/workflows/${data.workflow_run_id}`,
pathname: `/workflows/${data.workflowRunID}`,
search: `?projectID=${projectID}&projectRole=${projectRole}`,
});
}}
@ -468,7 +468,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
value="Analysis"
onClick={() => {
history.push({
pathname: `/analytics/workflowStatistics/${data.workflow_id}`,
pathname: `/analytics/workflowStatistics/${data.workflowID}`,
search: `?projectID=${projectID}&projectRole=${projectRole}`,
});
}}
@ -510,10 +510,7 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
</ButtonOutlined>
}
>
<ManifestModal
project_id={projectID}
workflow_id={data.workflow_id}
/>
<ManifestModal projectID={projectID} workflowID={data.workflowID} />
</Modal>
{data.phase?.toLowerCase() === 'running' && (
<MenuItem
@ -522,8 +519,8 @@ const TableData: React.FC<TableDataProps> = ({ data, refetchQuery }) => {
terminateWorkflow({
variables: {
projectID: getProjectID(),
workflowID: data.workflow_id,
workflow_run_id: data.workflow_run_id,
workflowID: data.workflowID,
workflowRunID: data.workflowRunID,
},
});
}}

View File

@ -21,17 +21,17 @@ import {
WORKFLOW_DETAILS,
WORKFLOW_EVENTS,
} from '../../../graphql';
import { Clusters, ClusterVars } from '../../../models/graphql/clusterData';
import { ClusterRequest, Clusters } from '../../../models/graphql/clusterData';
import {
Pagination,
SortInput,
SortRequest,
Workflow,
WorkflowDataVars,
WorkflowDataRequest,
WorkflowRun,
WorkflowRunFilterInput,
WorkflowRunFilterRequest,
WorkflowStatus,
WorkflowSubscription,
WorkflowSubscriptionInput,
WorkflowSubscriptionRequest,
} from '../../../models/graphql/workflowData';
import { getProjectID } from '../../../utils/getSearchParams';
import HeaderSection from './HeaderSection';
@ -58,12 +58,12 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
});
// States for filters
const [filters, setFilters] = useState<WorkflowRunFilterInput>({
workflow_name: workflowName,
cluster_name: 'All',
workflow_status: 'All',
date_range: {
start_date: new Date(0).valueOf().toString(),
const [filters, setFilters] = useState<WorkflowRunFilterRequest>({
workflowName,
clusterName: 'All',
workflowStatus: 'All',
dateRange: {
startDate: new Date(0).valueOf().toString(),
},
});
@ -73,32 +73,32 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
);
// State for sorting
const [sortData, setSortData] = useState<SortInput>({
field: 'Time',
const [sortData, setSortData] = useState<SortRequest>({
field: 'TIME',
descending: true,
});
// Checks if the workflow event from subscription exists in the table
function isFiltered(newWorkflow: WorkflowRun) {
const nameExists =
filters.workflow_name &&
newWorkflow.workflow_name
filters.workflowName &&
newWorkflow.workflowName
.toLowerCase()
.includes(filters.workflow_name.toLowerCase());
.includes(filters.workflowName.toLowerCase());
const clusterExists =
filters.cluster_name === 'All' ||
filters.cluster_name === newWorkflow.cluster_name;
filters.clusterName === 'All' ||
filters.clusterName === newWorkflow.clusterName;
const phaseExists =
filters.workflow_status === 'All' ||
filters.workflow_status === newWorkflow.phase;
filters.workflowStatus === 'All' ||
filters.workflowStatus === newWorkflow.phase;
const dateExists =
filters.date_range &&
newWorkflow.last_updated >= filters.date_range.start_date &&
(filters.date_range.end_date
? newWorkflow.last_updated < filters.date_range.end_date
filters.dateRange &&
newWorkflow.lastUpdated >= filters.dateRange.startDate &&
(filters.dateRange.endDate
? newWorkflow.lastUpdated < filters.dateRange.endDate
: true);
const shouldAddNewWorkflow =
@ -108,11 +108,11 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
}
// Query to get list of Clusters
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterVars>(
const { data: clusterList } = useQuery<Partial<Clusters>, ClusterRequest>(
GET_CLUSTER_NAMES,
{
variables: {
project_id: projectID,
projectID,
},
}
);
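The cluster-names lookup follows the same pattern; a short sketch as a hypothetical useClusterNames helper (the variable is a bare projectID instead of project_id, and the response root is listClusters rather than getCluster):

import { useQuery } from '@apollo/client';
import { GET_CLUSTER_NAMES } from '../../../graphql';
import { ClusterRequest, Clusters } from '../../../models/graphql/clusterData';

// Hypothetical helper; returns the agent names used to populate the filter dropdowns.
const useClusterNames = (projectID: string): string[] => {
  const { data } = useQuery<Partial<Clusters>, ClusterRequest>(
    GET_CLUSTER_NAMES,
    { variables: { projectID } }
  );
  return data?.listClusters?.map((cluster) => cluster.clusterName) ?? [];
};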
@ -120,11 +120,11 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
// Query to get workflows
const { subscribeToMore, data, error, refetch } = useQuery<
Workflow,
WorkflowDataVars
WorkflowDataRequest
>(WORKFLOW_DETAILS, {
variables: {
workflowRunsInput: {
project_id: projectID,
request: {
projectID,
pagination: {
page: paginationData.page,
limit: paginationData.limit,
@ -138,23 +138,22 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
// Using subscription to get realtime data
useEffect(() => {
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionInput>({
subscribeToMore<WorkflowSubscription, WorkflowSubscriptionRequest>({
document: WORKFLOW_EVENTS,
variables: { projectID },
updateQuery: (prev, { subscriptionData }) => {
if (!subscriptionData.data || !prev || !prev.getWorkflowRuns)
if (!subscriptionData.data || !prev || !prev.listWorkflowRuns)
return prev;
const modifiedWorkflows = prev.getWorkflowRuns.workflow_runs.slice();
const newWorkflow = subscriptionData.data.workflowEventListener;
const modifiedWorkflows = prev.listWorkflowRuns.workflowRuns.slice();
const newWorkflow = subscriptionData.data.getWorkflowEvents;
// Updating the query data
let i = 0;
let totalNoOfWorkflows = prev.getWorkflowRuns.total_no_of_workflow_runs;
let totalNoOfWorkflows = prev.listWorkflowRuns.totalNoOfWorkflowRuns;
for (; i < modifiedWorkflows.length; i++) {
if (
modifiedWorkflows[i].workflow_run_id === newWorkflow.workflow_run_id
modifiedWorkflows[i].workflowRunID === newWorkflow.workflowRunID
) {
modifiedWorkflows[i] = newWorkflow;
break;
@ -166,9 +165,9 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
}
return {
getWorkflowRuns: {
total_no_of_workflow_runs: totalNoOfWorkflows,
workflow_runs: modifiedWorkflows,
listWorkflowRuns: {
totalNoOfWorkflowRuns: totalNoOfWorkflows,
workflowRuns: modifiedWorkflows,
},
};
},
@ -191,13 +190,12 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
setOpen(true);
};
const workflowRuns = data?.getWorkflowRuns.workflow_runs;
const workflowRuns = data?.listWorkflowRuns.workflowRuns;
// Functions passed as props in the headerSection
const changeSearch = (
event: React.ChangeEvent<HTMLTextAreaElement | HTMLInputElement>
) => {
setFilters({ ...filters, workflow_name: event.target.value as string });
setFilters({ ...filters, workflowName: event.target.value as string });
setWorkflowName(event.target.value as string);
setPaginationData({ ...paginationData, page: 0 });
};
@ -210,7 +208,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
) => {
setFilters({
...filters,
workflow_status: event.target.value as WorkflowStatus,
workflowStatus: event.target.value as WorkflowStatus,
});
setPaginationData({ ...paginationData, page: 0 });
};
@ -221,7 +219,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
value: unknown;
}>
) => {
setFilters({ ...filters, cluster_name: event.target.value as string });
setFilters({ ...filters, clusterName: event.target.value as string });
setPaginationData({ ...paginationData, page: 0 });
};
@ -230,12 +228,12 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
// Change filter value for date range
setFilters({
...filters,
date_range: {
start_date: new Date(selectStartDate)
dateRange: {
startDate: new Date(selectStartDate)
.setHours(0, 0, 0)
.valueOf()
.toString(),
end_date: new Date(selectEndDate)
endDate: new Date(selectEndDate)
.setHours(23, 59, 59)
.valueOf()
.toString(),
@ -257,11 +255,11 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
<section className="Heading section">
{/* Header Section */}
<HeaderSection
searchValue={filters.workflow_name}
searchValue={filters.workflowName}
changeSearch={changeSearch}
statusValue={filters.workflow_status}
statusValue={filters.workflowStatus}
changeStatus={changeStatus}
clusterValue={filters.cluster_name}
clusterValue={filters.clusterName}
changeCluster={changeCluster}
popOverClick={handlePopOverClick}
popOverClose={handlePopOverClose}
@ -299,7 +297,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
size="small"
onClick={() =>
setSortData({
field: 'Name',
field: 'NAME',
})
}
>
@ -310,7 +308,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
size="small"
onClick={() =>
setSortData({
field: 'Name',
field: 'NAME',
descending: true,
})
}
@ -354,7 +352,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
size="small"
onClick={() =>
setSortData({
field: 'Time',
field: 'TIME',
descending: true,
})
}
@ -366,7 +364,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
size="small"
onClick={() =>
setSortData({
field: 'Time',
field: 'TIME',
})
}
>
@ -401,8 +399,8 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
) : workflowRuns && workflowRuns.length ? (
workflowRuns.map((dataRow) => (
<TableRow
data-cy={dataRow.workflow_name}
key={dataRow.workflow_run_id}
data-cy={dataRow.workflowName}
key={dataRow.workflowRunID}
>
<TableData data={dataRow} refetchQuery={refetch} />
</TableRow>
@ -424,7 +422,7 @@ const BrowseWorkflow: React.FC<BrowseWorkflowProps> = ({
<TablePagination
rowsPerPageOptions={[10, 25, 50]}
component="div"
count={data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0}
count={data?.listWorkflowRuns.totalNoOfWorkflowRuns ?? 0}
rowsPerPage={paginationData.limit}
page={paginationData.page}
onChangePage={(_, page) =>

View File

@ -1,5 +1,6 @@
import { useLazyQuery, useQuery } from '@apollo/client';
import { RadioGroup, Typography, useTheme } from '@material-ui/core';
import ArrowUpwardIcon from '@material-ui/icons/ArrowUpward';
import {
ButtonOutlined,
LitmusCard,
@ -8,22 +9,21 @@ import {
Search,
} from 'litmus-ui';
import React, {
lazy,
forwardRef,
lazy,
useEffect,
useImperativeHandle,
useState,
} from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import ArrowUpwardIcon from '@material-ui/icons/ArrowUpward';
import Loader from '../../../components/Loader';
import { constants } from '../../../constants';
import {
GET_CLUSTER,
GET_IMAGE_REGISTRY,
LIST_IMAGE_REGISTRY,
LIST_IMAGE_REGISTRY_BY_PROJECT_ID,
} from '../../../graphql';
import { ImageRegistryInfo } from '../../../models/redux/image_registry';
import useActions from '../../../redux/actions';
import * as AlertActions from '../../../redux/actions/alert';
import * as ImageRegistryActions from '../../../redux/actions/image_registry';
@ -31,17 +31,16 @@ import * as WorkflowActions from '../../../redux/actions/workflow';
import { RootState } from '../../../redux/reducers';
import { getProjectID, getProjectRole } from '../../../utils/getSearchParams';
import useStyles from './styles';
import Loader from '../../../components/Loader';
const AgentDeployModal = lazy(
() => import('../../../components/AgentDeployModal')
);
interface Cluster {
cluster_name: string;
is_active: boolean;
cluster_id: string;
agent_namespace: string;
clusterName: string;
isActive: boolean;
clusterID: string;
agentNamespace: string;
}
const ChooseWorkflowAgent = forwardRef((_, ref) => {
@ -52,8 +51,8 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
const workflow = useActions(WorkflowActions);
const alert = useActions(AlertActions);
const imageRegistry = useActions(ImageRegistryActions);
const clusterid: string = useSelector(
(state: RootState) => state.workflowData.clusterid
const clusterID: string = useSelector(
(state: RootState) => state.workflowData.clusterID
);
const selectedProjectID = getProjectID();
@ -72,36 +71,35 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
fetchPolicy: 'network-only',
onCompleted: (data) => {
if (data !== undefined) {
const regData = data.GetImageRegistry
.image_registry_info as ImageRegistryInfo;
const regData = data.getImageRegistry.imageRegistryInfo;
imageRegistry.selectImageRegistry({
image_registry_name: regData.image_registry_name,
image_repo_name: regData.image_repo_name,
image_registry_type: regData.image_registry_type,
secret_name: regData.secret_name,
secret_namespace: regData.secret_namespace,
enable_registry: regData.enable_registry,
is_default: regData.is_default,
image_registry_name: regData.imageRegistryName,
image_repo_name: regData.imageRepoName,
image_registry_type: regData.imageRegistryType,
secret_name: regData.secretName,
secret_namespace: regData.secretNamespace,
enable_registry: regData.enableRegistry,
is_default: regData.isDefault,
update_registry: true,
});
}
},
});
useQuery(LIST_IMAGE_REGISTRY, {
useQuery(LIST_IMAGE_REGISTRY_BY_PROJECT_ID, {
variables: {
data: selectedProjectID,
},
fetchPolicy: 'network-only',
onCompleted: (data) => {
if (
data.ListImageRegistry !== null &&
data.ListImageRegistry.length > 0
data.listImageRegistry !== null &&
data.listImageRegistry.length > 0
) {
getRegistryData({
variables: {
registryid: data.ListImageRegistry[0].image_registry_id,
projectid: selectedProjectID,
imageRegistryID: data.listImageRegistry[0].imageRegistryID,
projectID: selectedProjectID,
},
});
} else {
@ -122,19 +120,19 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
const [getCluster, { loading }] = useLazyQuery(GET_CLUSTER, {
onCompleted: (data) => {
const clusters: Cluster[] = [];
if (data && data.getCluster.length !== 0) {
data.getCluster.forEach((e: Cluster) => {
if (e.is_active === true) {
if (data && data.listClusters.length !== 0) {
data.listClusters.forEach((e: Cluster) => {
if (e.isActive === true) {
// Populating all the cluster data in the clusters[] array
clusters.push({
cluster_name: e.cluster_name,
is_active: e.is_active,
cluster_id: e.cluster_id,
agent_namespace: e.agent_namespace,
clusterName: e.clusterName,
isActive: e.isActive,
clusterID: e.clusterID,
agentNamespace: e.agentNamespace,
});
// Setting the initial workflow yaml to be of type Workflow
workflow.setWorkflowDetails({
clusterid: '',
clusterID: '',
cronSyntax: '',
scheduleType: {
scheduleOnce: 'now',
@ -162,7 +160,7 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
alert.changeAlertState(true);
return false;
}
if (clusterid === '' || clusterData.length === 0) {
if (clusterID === '' || clusterData.length === 0) {
alert.changeAlertState(true); // No Cluster has been selected and user clicked on Next
return false;
}
@ -171,7 +169,7 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
// Rendering once to get the cluster data
useEffect(() => {
getCluster({ variables: { project_id: selectedProjectID } });
getCluster({ variables: { projectID: selectedProjectID } });
}, []);
const handleChange = (event: React.ChangeEvent<HTMLInputElement>) => {
@ -181,7 +179,7 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
// Filter the clusters based on search results
const filteredCluster = clusterData.filter((cluster: Cluster) => {
if (search === null) return cluster;
if (cluster.cluster_name.toLowerCase().includes(search.toLowerCase()))
if (cluster.clusterName.toLowerCase().includes(search.toLowerCase()))
return cluster;
return null;
});
@ -190,12 +188,12 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
useEffect(() => {
if (currentlySelectedAgent !== '') {
clusterData.forEach((cluster) => {
if (currentlySelectedAgent === cluster.cluster_id) {
if (currentlySelectedAgent === cluster.clusterID) {
workflow.setWorkflowDetails({
clusterid: cluster.cluster_id,
project_id: selectedProjectID,
clustername: cluster.cluster_name,
namespace: cluster.agent_namespace,
clusterID: cluster.clusterID,
projectID: selectedProjectID,
clustername: cluster.clusterName,
namespace: cluster.agentNamespace,
});
}
});
@ -284,25 +282,25 @@ const ChooseWorkflowAgent = forwardRef((_, ref) => {
{filteredCluster?.length > 0 ? (
filteredCluster.map((cluster) => (
<LitmusCard
key={cluster.cluster_id}
glow={currentlySelectedAgent === cluster.cluster_id}
key={cluster.clusterID}
glow={currentlySelectedAgent === cluster.clusterID}
width="40%"
height="4rem"
className={classes.litmusCard}
borderColor={
currentlySelectedAgent === cluster.cluster_id
currentlySelectedAgent === cluster.clusterID
? palette.primary.main
: palette.border.main
}
>
<RadioButton
value={cluster.cluster_id}
value={cluster.clusterID}
className={classes.agentRadioButton}
data-cy={cluster.cluster_name}
data-cy={cluster.clusterName}
>
<div>
<Typography>{cluster.cluster_name}</Typography>
<Typography>{cluster.cluster_id}</Typography>
<Typography>{cluster.clusterName}</Typography>
<Typography>{cluster.clusterID}</Typography>
</div>
</RadioButton>
</LitmusCard>

View File

@ -12,11 +12,11 @@ import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';
import {
DELETE_WORKFLOW_TEMPLATE,
LIST_MANIFEST_TEMPLATE,
GET_MANIFEST_TEMPLATE,
} from '../../../graphql';
import {
ListManifestTemplate,
ListManifestTemplateArray,
GetManifestTemplate,
GetManifestTemplateArray,
} from '../../../models/graphql/workflowListData';
import useActions from '../../../redux/actions';
import * as WorkflowActions from '../../../redux/actions/workflow';
@ -42,11 +42,11 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
const [search, setSearch] = useState<string | null>(null);
const [selected, setSelected] = useState<string>('');
const workflowAction = useActions(WorkflowActions);
const { data: templateData } = useQuery<ListManifestTemplate>(
LIST_MANIFEST_TEMPLATE,
const { data: templateData } = useQuery<GetManifestTemplate>(
GET_MANIFEST_TEMPLATE,
{
variables: {
data: getProjectID(),
projectID: getProjectID(),
},
fetchPolicy: 'network-only',
}
@ -55,17 +55,17 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
const [deleteTemplate] = useMutation(DELETE_WORKFLOW_TEMPLATE, {
refetchQueries: [
{
query: LIST_MANIFEST_TEMPLATE,
variables: { data: getProjectID() },
query: GET_MANIFEST_TEMPLATE,
variables: { projectID: getProjectID() },
},
],
});
const filteredExistingWorkflows: ListManifestTemplateArray[] = templateData
? templateData.ListManifestTemplate.filter(
(w: ListManifestTemplateArray) => {
const filteredExistingWorkflows = templateData
? templateData.listWorkflowManifests.filter(
(w: GetManifestTemplateArray) => {
if (search === null) return w;
if (w.template_name.toLowerCase().includes(search.toLowerCase()))
if (w.templateName.toLowerCase().includes(search.toLowerCase()))
return w;
return null;
}
@ -81,7 +81,7 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
};
selectedExp(selection.id);
const templateData = filteredExistingWorkflows.filter((workflow) => {
return workflow.template_id === event.target.value;
return workflow.templateID === event.target.value;
})[0];
workflowAction.setWorkflowManifest({
isCustomWorkflow: templateData.isCustomWorkflow,
@ -114,22 +114,22 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
>
{filteredExistingWorkflows && filteredExistingWorkflows.length ? (
filteredExistingWorkflows.map(
(templateData: ListManifestTemplateArray) => (
(templateData: GetManifestTemplateArray) => (
<LitmusCard
width="100%"
height="5rem"
key={templateData.template_id}
key={templateData.templateID}
borderColor={palette.border.main}
className={classes.existingWorkflowCard}
>
<RadioButton value={templateData.template_id.toString()}>
<RadioButton value={templateData.templateID.toString()}>
<div id="body">
<div id="left-div">
<Typography>{templateData.template_name}</Typography>
<Typography>{templateData.templateName}</Typography>
</div>
<div id="right-div">
<Typography>
{templateData.template_description}
{templateData.templateDescription}
</Typography>
</div>
<div id="last-div">
@ -138,7 +138,7 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
src="./icons/litmus-icon.svg"
alt="Experiment Icon"
/>
<Typography>{templateData.project_name}</Typography>
<Typography>{templateData.projectName}</Typography>
</div>
<img
@ -152,7 +152,7 @@ const ChooseWorkflowFromExisting: React.FC<ChooseWorkflowFromExistingProps> = ({
deleteTemplate({
variables: {
projectID: getProjectID(),
data: templateData.template_id,
data: templateData.templateID,
},
});
}}

View File

@ -18,7 +18,7 @@ const SelectMyHub = () => {
// Get all MyHubs with status
const { data } = useQuery<HubStatus>(GET_HUB_STATUS, {
variables: { data: selectedProjectID },
variables: { projectID: selectedProjectID },
fetchPolicy: 'cache-and-network',
});
@ -38,25 +38,25 @@ const SelectMyHub = () => {
};
useEffect(() => {
if (data?.getHubStatus !== undefined) {
if (data.getHubStatus.length) {
if (data?.listHubStatus !== undefined) {
if (data.listHubStatus.length) {
const hubDetails: MyHubDetail[] = [];
data.getHubStatus.forEach((hub) => {
data.listHubStatus.forEach((hub) => {
/**
* Push only available hubs
*/
if (hub.IsAvailable) {
if (hub.isAvailable) {
hubDetails.push({
id: hub.id,
HubName: hub.HubName,
RepoBranch: hub.RepoBranch,
RepoURL: hub.RepoURL,
hubName: hub.hubName,
repoBranch: hub.repoBranch,
repoURL: hub.repoURL,
});
}
});
setAvailableHubs(hubDetails);
data.getHubStatus.forEach((hubData) => {
if (hubData.HubName.toLowerCase() === 'litmus chaoshub') {
data.listHubStatus.forEach((hubData) => {
if (hubData.hubName.toLowerCase() === 'litmus chaoshub') {
setSelectedHub('Litmus ChaosHub');
localforage.setItem('selectedHub', 'Litmus ChaosHub');
localforage.setItem('hasSetWorkflowData', false);
@ -85,11 +85,11 @@ const SelectMyHub = () => {
>
{availableHubs.map((hubs) => (
<MenuItem
key={hubs.HubName}
key={hubs.hubName}
data-cy="hubOption"
value={hubs.HubName}
value={hubs.hubName}
>
{hubs.HubName}
{hubs.hubName}
</MenuItem>
))}
</Select>
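The MyHub status query is renamed the same way; a sketch as a hypothetical useAvailableHubNames helper (HubInfo is a minimal local stand-in, since the full HubStatus model is not shown in this diff): the variable is now projectID and the response root is listHubStatus with camelCase hub fields.

import { useQuery } from '@apollo/client';
import { GET_HUB_STATUS } from '../../../graphql';

// Minimal local stand-in for the hub model used above.
interface HubInfo {
  hubName: string;
  isAvailable: boolean;
}

// Hypothetical helper; keeps only available hubs, as the component above does.
const useAvailableHubNames = (projectID: string): string[] => {
  const { data } = useQuery(GET_HUB_STATUS, {
    variables: { projectID },
    fetchPolicy: 'cache-and-network',
  });
  return (data?.listHubStatus ?? [])
    .filter((hub: HubInfo) => hub.isAvailable)
    .map((hub: HubInfo) => hub.hubName);
};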

View File

@ -47,7 +47,7 @@ const ChoosePreDefinedExperiments: React.FC<ChoosePreDefinedExperimentsProps> =
// Get all MyHubs with status
const { data } = useQuery<HubStatus>(GET_HUB_STATUS, {
variables: { data: selectedProjectID },
variables: { projectID: selectedProjectID },
fetchPolicy: 'cache-and-network',
});
@ -57,8 +57,8 @@ const ChoosePreDefinedExperiments: React.FC<ChoosePreDefinedExperimentsProps> =
const [getPredefinedWorkflow] = useLazyQuery(GET_PREDEFINED_WORKFLOW_LIST, {
fetchPolicy: 'network-only',
onCompleted: (data) => {
if (data.GetPredefinedWorkflowList !== undefined) {
setWorkflowlist(data.GetPredefinedWorkflowList);
if (data.listPredefinedWorkflows !== undefined) {
setWorkflowlist(data.listPredefinedWorkflows);
}
},
onError: () => {
@ -98,8 +98,8 @@ const ChoosePreDefinedExperiments: React.FC<ChoosePreDefinedExperimentsProps> =
setSelectedHub(event.target.value as string);
getPredefinedWorkflow({
variables: {
hubname: event.target.value as string,
projectid: selectedProjectID,
hubName: event.target.value as string,
projectID: selectedProjectID,
},
});
localforage.setItem('selectedHub', event.target.value as string);
@ -110,32 +110,32 @@ const ChoosePreDefinedExperiments: React.FC<ChoosePreDefinedExperimentsProps> =
* fetch the pre-defined workflows
*/
useEffect(() => {
if (data?.getHubStatus !== undefined) {
if (data.getHubStatus.length) {
if (data?.listHubStatus !== undefined) {
if (data.listHubStatus.length) {
const hubDetails: MyHubDetail[] = [];
data.getHubStatus.forEach((hub) => {
data.listHubStatus.forEach((hub) => {
/**
* Push only available hub
*/
if (hub.IsAvailable) {
if (hub.isAvailable) {
hubDetails.push({
id: hub.id,
HubName: hub.HubName,
RepoBranch: hub.RepoBranch,
RepoURL: hub.RepoURL,
hubName: hub.hubName,
repoBranch: hub.repoBranch,
repoURL: hub.repoURL,
});
}
});
setAvailableHubs(hubDetails);
}
data.getHubStatus.forEach((hubData) => {
if (hubData.HubName.toLowerCase() === 'litmus chaoshub') {
data.listHubStatus.forEach((hubData) => {
if (hubData.hubName.toLowerCase() === 'litmus chaoshub') {
setSelectedHub('Litmus ChaosHub');
localforage.setItem('selectedHub', 'Litmus ChaosHub');
getPredefinedWorkflow({
variables: {
hubname: 'Litmus ChaosHub',
projectid: selectedProjectID,
hubName: 'Litmus ChaosHub',
projectID: selectedProjectID,
},
});
}
@ -163,9 +163,9 @@ const ChoosePreDefinedExperiments: React.FC<ChoosePreDefinedExperimentsProps> =
MenuProps={MenuProps}
>
{availableHubs.map((hubs) => (
<MenuItem key={hubs.HubName} value={hubs.HubName}>
<MenuItem key={hubs.hubName} value={hubs.hubName}>
<Typography data-cy="PreDefinedHubOption">
{hubs.HubName}
{hubs.hubName}
</Typography>
</MenuItem>
))}

View File

@ -181,7 +181,7 @@ const ScheduleWorkflow = forwardRef((_, ref) => {
newParsedYaml.metadata.name = fetchWorkflowNameFromManifest(manifest);
newParsedYaml.metadata.namespace = namespace;
newParsedYaml.metadata.labels = {
workflow_id: workflowData.workflow_id,
workflow_id: workflowData.workflowID,
};
newParsedYaml.spec.workflowSpec = oldParsedYaml.spec;
const tz = {
@ -207,7 +207,7 @@ const ScheduleWorkflow = forwardRef((_, ref) => {
newParsedYaml.metadata.namespace = namespace;
newParsedYaml.spec = oldParsedYaml.spec.workflowSpec;
newParsedYaml.metadata.labels = {
workflow_id: workflowData.workflow_id,
workflow_id: workflowData.workflowID,
};
NewYaml = YAML.stringify(newParsedYaml);
workflow.setWorkflowManifest({
@ -225,7 +225,7 @@ const ScheduleWorkflow = forwardRef((_, ref) => {
delete newParsedYaml.metadata.generateName;
newParsedYaml.metadata.name = fetchWorkflowNameFromManifest(manifest);
newParsedYaml.metadata.namespace = namespace;
newParsedYaml.metadata.labels = { workflow_id: workflowData.workflow_id };
newParsedYaml.metadata.labels = { workflow_id: workflowData.workflowID };
const tz = {
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC',
};

View File

@ -1,3 +1,4 @@
import { useSubscription } from '@apollo/client';
import {
Button,
Checkbox,
@ -9,23 +10,16 @@ import {
Typography,
useTheme,
} from '@material-ui/core';
import React, { useEffect, useState } from 'react';
import { InputField } from 'litmus-ui';
import { Autocomplete } from '@material-ui/lab';
import ToggleButton from '@material-ui/lab/ToggleButton';
import ToggleButtonGroup from '@material-ui/lab/ToggleButtonGroup';
import YAML from 'yaml';
import { useSelector } from 'react-redux';
import { useSubscription } from '@apollo/client';
import { Autocomplete } from '@material-ui/lab';
import { InputField } from 'litmus-ui';
import React, { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import useStyles from './styles';
import * as WorkflowActions from '../../../../redux/actions/workflow';
import {
WorkflowData,
WorkflowManifest,
} from '../../../../models/redux/workflow';
import { RootState } from '../../../../redux/reducers';
import useActions from '../../../../redux/actions';
import { useSelector } from 'react-redux';
import YAML from 'yaml';
import { constants } from '../../../../constants';
import { KUBE_OBJ } from '../../../../graphql';
import {
GVRRequest,
KubeObjData,
@ -33,9 +27,15 @@ import {
KubeObjResource,
KubeObjResponse,
} from '../../../../models/graphql/createWorkflowData';
import { KUBE_OBJ } from '../../../../graphql';
import { constants } from '../../../../constants';
import {
WorkflowData,
WorkflowManifest,
} from '../../../../models/redux/workflow';
import useActions from '../../../../redux/actions';
import * as WorkflowActions from '../../../../redux/actions/workflow';
import { RootState } from '../../../../redux/reducers';
import { gvrData } from './data';
import useStyles from './styles';
interface AppInfoData {
namespace: string;
@ -71,7 +71,7 @@ const TargetApplication: React.FC<TargetApplicationProp> = ({ gotoStep }) => {
const workflowData: WorkflowData = useSelector(
(state: RootState) => state.workflowData
);
const { clusterid } = workflowData;
const { clusterID } = workflowData;
const engineManifest = YAML.parse(manifest.engineYAML);
/**
@ -161,10 +161,10 @@ const TargetApplication: React.FC<TargetApplicationProp> = ({ gotoStep }) => {
*/
const { data } = useSubscription<KubeObjResponse, KubeObjRequest>(KUBE_OBJ, {
variables: {
data: {
cluster_id: clusterid,
object_type: 'kubeobject',
kube_obj_request: {
request: {
clusterID,
objectType: 'kubeobject',
kubeObjRequest: {
group: GVRObj.group,
version: GVRObj.version,
resource: GVRObj.resource,
@ -184,7 +184,7 @@ const TargetApplication: React.FC<TargetApplicationProp> = ({ gotoStep }) => {
/**
* Parse the kubeObject data
*/
const kubeData: KubeObjData[] = JSON.parse(data.getKubeObject.kube_obj);
const kubeData: KubeObjData[] = JSON.parse(data.getKubeObject.kubeObj);
kubeData.forEach((obj: KubeObjData) => {
const applabels: string[] = [];
if (obj.data != null) {

View File

@ -145,10 +145,10 @@ const TuneWorkflow = forwardRef((_, ref) => {
const [getCharts] = useLazyQuery<Charts>(GET_CHARTS_DATA, {
onCompleted: (data) => {
const allExp: ChartName[] = [];
data.getCharts.forEach((data) => {
return data.Spec.Experiments?.forEach((experiment) => {
data.listCharts.forEach((data) => {
return data.spec.experiments?.forEach((experiment) => {
allExp.push({
ChaosName: data.Metadata.Name,
ChaosName: data.metadata.name,
ExperimentName: experiment,
});
});
@ -166,7 +166,7 @@ const TuneWorkflow = forwardRef((_, ref) => {
{
onCompleted: (data) => {
const wfmanifest = updateEngineName(
YAML.parse(data.GetPredefinedExperimentYAML)
YAML.parse(data.getPredefinedExperimentYAML)
);
const updatedManifestImage = updateManifestImage(
YAML.parse(wfmanifest),
@ -189,7 +189,7 @@ const TuneWorkflow = forwardRef((_, ref) => {
*/
const [getTemplate] = useLazyQuery(GET_TEMPLATE_BY_ID, {
onCompleted: (data) => {
const parsedYAML = YAML.parse(data.GetTemplateManifestByID.manifest);
const parsedYAML = YAML.parse(data.getWorkflowManifestByID.manifest);
const updatedManifestImage = updateManifestImage(
parsedYAML,
@ -308,12 +308,12 @@ const TuneWorkflow = forwardRef((_, ref) => {
localforage.getItem('selectedHub').then((hub) => {
getPredefinedExperimentYaml({
variables: {
experimentInput: {
ProjectID: selectedProjectID,
ChartName: 'predefined',
ExperimentName: (value as WorkflowDetailsProps).CRDLink,
HubName: hub as string,
FileType: 'WORKFLOW',
request: {
projectID: selectedProjectID,
chartName: 'predefined',
experimentName: (value as WorkflowDetailsProps).CRDLink,
hubName: hub as string,
fileType: 'WORKFLOW',
},
},
});
@ -330,7 +330,7 @@ const TuneWorkflow = forwardRef((_, ref) => {
getTemplate({
variables: {
projectID: getProjectID(),
data: (value as ChooseWorkflowRadio).id,
templateID: (value as ChooseWorkflowRadio).id,
},
});
}
@ -340,7 +340,7 @@ const TuneWorkflow = forwardRef((_, ref) => {
localforage.getItem('selectedHub').then((hub) => {
setHubName(hub as string);
getCharts({
variables: { projectID: selectedProjectID, HubName: hub as string },
variables: { projectID: selectedProjectID, hubName: hub as string },
});
});
}
@ -377,23 +377,23 @@ const TuneWorkflow = forwardRef((_, ref) => {
const handleDone = () => {
getExperimentYaml({
variables: {
experimentInput: {
ProjectID: selectedProjectID,
HubName: hubName,
ChartName: selectedExp.split('/')[0],
ExperimentName: selectedExp.split('/')[1],
FileType: 'EXPERIMENT',
request: {
projectID: selectedProjectID,
hubName,
chartName: selectedExp.split('/')[0],
experimentName: selectedExp.split('/')[1],
fileType: 'EXPERIMENT',
},
},
});
getEngineYaml({
variables: {
experimentInput: {
ProjectID: selectedProjectID,
HubName: hubName,
ChartName: selectedExp.split('/')[0],
ExperimentName: selectedExp.split('/')[1],
FileType: 'ENGINE',
request: {
projectID: selectedProjectID,
hubName,
chartName: selectedExp.split('/')[0],
experimentName: selectedExp.split('/')[1],
fileType: 'ENGINE',
},
},
});
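The ChaosHub file queries above (experiment, engine, workflow and CSV fetches) all take the same renamed wrapper; a sketch as a hypothetical pure helper, where selectedExp keeps the chartName/experimentName format used above and the fileType values are the ones appearing in this diff:

// Hypothetical helper, not part of the PR: builds the camelCase `request`
// wrapper now expected by the hub file queries (previously `experimentInput`
// with keys such as ProjectID / HubName / FileType).
const buildHubFileRequest = (
  projectID: string,
  hubName: string,
  selectedExp: string,
  fileType: 'EXPERIMENT' | 'ENGINE' | 'WORKFLOW' | 'CSV'
) => ({
  request: {
    projectID,
    hubName,
    chartName: selectedExp.split('/')[0],
    experimentName: selectedExp.split('/')[1],
    fileType,
  },
});

// e.g. getEngineYaml({ variables: buildHubFileRequest(projectID, hubName, selectedExp, 'ENGINE') });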

View File

@ -20,7 +20,7 @@ import YamlEditor from '../../../components/YamlEditor/Editor';
import { parseYamlValidations } from '../../../components/YamlEditor/Validations';
import { CREATE_WORKFLOW } from '../../../graphql';
import {
CreateWorkFlowInput,
CreateWorkFlowRequest,
CreateWorkflowResponse,
WeightMap,
} from '../../../models/graphql/createWorkflowData';
@ -90,7 +90,7 @@ const VerifyCommit = forwardRef(
(state: RootState) => state.workflowData
);
const { clusterid, cronSyntax, clustername } = workflowData;
const { clusterID, cronSyntax, clusterName } = workflowData;
const { manifest, isCustomWorkflow, isUploaded } = useSelector(
(state: RootState) => state.workflowManifest
@ -226,7 +226,7 @@ const VerifyCommit = forwardRef(
// Create Workflow Mutation
const [createChaosWorkFlow, { loading, error: workflowError }] =
useMutation<CreateWorkflowResponse, CreateWorkFlowInput>(
useMutation<CreateWorkflowResponse, CreateWorkFlowRequest>(
CREATE_WORKFLOW,
{
onError: () => {
@ -246,7 +246,7 @@ const VerifyCommit = forwardRef(
weights.forEach((data) => {
weightData.push({
experiment_name: data.experimentName,
experimentName: data.experimentName,
weightage: data.weight,
});
});
@ -261,17 +261,17 @@ const VerifyCommit = forwardRef(
const yamlJson = JSON.stringify(updatedYaml, null, 2); // Converted to Stringified JSON
const chaosWorkFlowInputs = {
workflow_manifest: yamlJson,
workflowManifest: yamlJson,
cronSyntax,
workflow_name: fetchWorkflowNameFromManifest(manifest),
workflow_description: workflow.description,
workflowName: fetchWorkflowNameFromManifest(manifest),
workflowDescription: workflow.description,
isCustomWorkflow,
weightages: weightData,
project_id: getProjectID(),
cluster_id: clusterid,
projectID: getProjectID(),
clusterID,
};
createChaosWorkFlow({
variables: { ChaosWorkFlowInput: chaosWorkFlowInputs },
variables: { request: chaosWorkFlowInputs },
});
}
};
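For the workflow creation call just above, a compact sketch as a hypothetical useCreateWorkflow hook (name, description and weightages are placeholders; only the imported names come from the PR): the single mutation variable is now request, and keys such as workflowManifest, projectID and clusterID replace their snake_case forms.

import { useMutation } from '@apollo/client';
import { CREATE_WORKFLOW } from '../../../graphql';
import {
  CreateWorkFlowRequest,
  CreateWorkflowResponse,
} from '../../../models/graphql/createWorkflowData';

// Hypothetical helper hook mirroring the mutation above.
const useCreateWorkflow = () => {
  const [createChaosWorkFlow] = useMutation<
    CreateWorkflowResponse,
    CreateWorkFlowRequest
  >(CREATE_WORKFLOW);
  return (manifestJSON: string, projectID: string, clusterID: string) =>
    createChaosWorkFlow({
      variables: {
        request: {
          workflowManifest: manifestJSON,
          cronSyntax: '', // placeholder: empty for a non-cron workflow
          workflowName: 'example-workflow', // placeholder
          workflowDescription: '', // placeholder
          isCustomWorkflow: false,
          weightages: [{ experimentName: 'example-experiment', weightage: 10 }],
          projectID,
          clusterID,
        },
      },
    });
};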
@ -415,7 +415,7 @@ const VerifyCommit = forwardRef(
</Typography>
<Typography className={classes.right}>
{clustername}
{clusterName}
</Typography>
</div>
<div className={classes.itemWrapper}>

View File

@ -66,12 +66,12 @@ const WorkflowSettings = forwardRef((_, ref) => {
fetchPolicy: 'cache-and-network',
onCompleted: (data) => {
if (data.getHubExperiment !== undefined) {
setName(data.getHubExperiment.Metadata.Name.toLowerCase());
setDescription(data.getHubExperiment.Spec.CategoryDescription);
setName(data.getHubExperiment.metadata.name.toLowerCase());
setDescription(data.getHubExperiment.spec.categoryDescription);
setIcon(
`${config.grahqlEndpoint}/icon/${projectID}/${hubName}/predefined/${data.getHubExperiment.Metadata.Name}.png`
`${config.grahqlEndpoint}/icon/${projectID}/${hubName}/predefined/${data.getHubExperiment.metadata.name}.png`
);
setCRDLink(data.getHubExperiment.Metadata.Name);
setCRDLink(data.getHubExperiment.metadata.name);
}
},
}
@ -80,12 +80,12 @@ const WorkflowSettings = forwardRef((_, ref) => {
const [getSavedTemplateDetails] = useLazyQuery(GET_TEMPLATE_BY_ID, {
fetchPolicy: 'network-only',
onCompleted: (data) => {
if (data.GetTemplateManifestByID !== undefined) {
setName(data.GetTemplateManifestByID.template_name);
setDescription(data.GetTemplateManifestByID.template_description);
if (data.getWorkflowManifestByID !== undefined) {
setName(data.getWorkflowManifestByID.templateName);
setDescription(data.getWorkflowManifestByID.templateDescription);
setIcon('./avatars/litmus.svg');
setCRDLink(data.GetTemplateManifestByID.template_id);
const savedTemplate = data.GetTemplateManifestByID.manifest;
setCRDLink(data.getWorkflowManifestByID.template_id);
const savedTemplate = data.getWorkflowManifestByID.manifest;
if (parsed(savedTemplate).length === 0) {
workflowAction.setWorkflowManifest({
manifest: savedTemplate,
@ -144,12 +144,12 @@ const WorkflowSettings = forwardRef((_, ref) => {
setHubName(hub as string);
getWorkflowDetails({
variables: {
data: {
HubName: hub as string,
ProjectID: projectID,
ChartName: 'predefined',
ExperimentName: (value as ChooseWorkflowRadio).id,
FileType: 'CSV',
request: {
hubName: hub as string,
projectID,
chartName: 'predefined',
experimentName: (value as ChooseWorkflowRadio).id,
fileType: 'CSV',
},
},
});
@ -161,7 +161,7 @@ const WorkflowSettings = forwardRef((_, ref) => {
getSavedTemplateDetails({
variables: {
projectID: getProjectID(),
data: (value as ChooseWorkflowRadio).id,
templateID: (value as ChooseWorkflowRadio).id,
},
});
setDisplayRegChange(true);

View File

@ -72,7 +72,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
});
history.push({
pathname: `/workflows/${data.workflow_run_id}`,
pathname: `/workflows/${data.workflowRunID}`,
search: `?projectID=${projectID}&projectRole=${projectRole}`,
});
}}
@ -93,10 +93,10 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
<Typography
className={`${classes.testName} ${classes.noWrapProvider}`}
>
{data.workflow_name}
{data.workflowName}
</Typography>
<Typography className={classes.hint}>
{data.cluster_name}
{data.clusterName}
</Typography>
</div>
</div>
@ -109,12 +109,12 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
)}
</Typography>
<Typography
className={getResiliencyScoreVariant(data.resiliency_score ?? 0)}
className={getResiliencyScoreVariant(data.resiliencyScore ?? 0)}
>
{data.resiliency_score === undefined ||
data.resiliency_score === null
{data.resiliencyScore === undefined ||
data.resiliencyScore === null
? 'NA'
: `${data.resiliency_score}%`}
: `${data.resiliencyScore}%`}
</Typography>
</div>
@ -127,7 +127,7 @@ const WorkflowRunCard: React.FC<WorkflowRunCardProps> = ({ data }) => {
<Typography
className={`${classes.noWrapProvider} ${classes.lastRunTime}`}
>
{timeDifferenceForDate(data.last_updated)}
{timeDifferenceForDate(data.lastUpdated)}
</Typography>
</div>
</div>

View File

@ -13,7 +13,7 @@ import { WORKFLOW_DETAILS } from '../../../graphql';
import { Role } from '../../../models/graphql/user';
import {
Workflow,
WorkflowDataVars,
WorkflowDataRequest,
} from '../../../models/graphql/workflowData';
import useActions from '../../../redux/actions';
import * as TabActions from '../../../redux/actions/tabs';
@ -50,12 +50,12 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
setModalOpen(true);
};
const { data, loading, error } = useQuery<Workflow, WorkflowDataVars>(
const { data, loading, error } = useQuery<Workflow, WorkflowDataRequest>(
WORKFLOW_DETAILS,
{
variables: {
workflowRunsInput: {
project_id: projectID,
request: {
projectID,
pagination: {
page: 0,
limit: 3,
@ -66,7 +66,7 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
}
);
const workflowRunCount = data?.getWorkflowRuns.total_no_of_workflow_runs ?? 0;
const workflowRunCount = data?.listWorkflowRuns.totalNoOfWorkflowRuns ?? 0;
if (error) {
console.error('Error fetching Workflow Data');
@ -112,9 +112,9 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
'homeViews.agentConfiguredHome.recentWorkflowRuns.schedule'
)}
>
{data?.getWorkflowRuns.workflow_runs.map((workflow) => {
{data?.listWorkflowRuns.workflowRuns.map((workflow) => {
return (
<WorkflowRunCard key={workflow.workflow_run_id} data={workflow} />
<WorkflowRunCard key={workflow.workflowRunID} data={workflow} />
);
})}
</RecentOverviewContainer>
@ -195,7 +195,7 @@ const AgentConfiguredHome: React.FC<AgentConfiguredHomeProps> = ({
/>
{/* Project Level info container */}
{projectRole === Role.owner && <ProjectInfoContainer />}
{projectRole === Role.OWNER && <ProjectInfoContainer />}
</div>
);
};
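A final sketch of the runs query used above, as a hypothetical useRecentRuns hook: the request wrapper replaces workflowRunsInput, and results are read from listWorkflowRuns.workflowRuns / totalNoOfWorkflowRuns instead of getWorkflowRuns.workflow_runs / total_no_of_workflow_runs.

import { useQuery } from '@apollo/client';
import { WORKFLOW_DETAILS } from '../../../graphql';
import {
  Workflow,
  WorkflowDataRequest,
} from '../../../models/graphql/workflowData';

// Hypothetical helper hook; fetches the three most recent runs, as the home page does.
const useRecentRuns = (projectID: string) => {
  const { data } = useQuery<Workflow, WorkflowDataRequest>(WORKFLOW_DETAILS, {
    variables: {
      request: {
        projectID,
        pagination: { page: 0, limit: 3 },
      },
    },
  });
  return {
    runs: data?.listWorkflowRuns.workflowRuns ?? [],
    total: data?.listWorkflowRuns.totalNoOfWorkflowRuns ?? 0,
  };
};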

View File

@ -13,7 +13,7 @@ const LandingHome: React.FC = () => {
{/* Agent Deployment Container */}
<UnconfiguredAgent />
{/* Project Level info container */}
{projectRole === Role.owner && <ProjectInfoContainer />}
{projectRole === Role.OWNER && <ProjectInfoContainer />}
</div>
);
};

Some files were not shown because too many files have changed in this diff.