From 32918ba5532c8044d3a12c5baf3fb6f696b71bb6 Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Thu, 22 Feb 2024 10:09:51 +0800 Subject: [PATCH 01/38] fix: find correct retry node when using `templateRef`. Fixes: #12633 (#12683) Signed-off-by: shuangkun Signed-off-by: Isitha Subasinghe --- workflow/controller/retry_tweak.go | 19 +++++++++++++++---- workflow/controller/retry_tweak_test.go | 23 +++++++++++++++++++++++ 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/workflow/controller/retry_tweak.go b/workflow/controller/retry_tweak.go index e8e10a5f87fd..a2d1d3baceed 100644 --- a/workflow/controller/retry_tweak.go +++ b/workflow/controller/retry_tweak.go @@ -15,12 +15,23 @@ type RetryTweak = func(retryStrategy wfv1.RetryStrategy, nodes wfv1.Nodes, pod * func FindRetryNode(nodes wfv1.Nodes, nodeID string) *wfv1.NodeStatus { boundaryID := nodes[nodeID].BoundaryID boundaryNode := nodes[boundaryID] - templateName := boundaryNode.TemplateName - for _, node := range nodes { - if node.Type == wfv1.NodeTypeRetry && node.TemplateName == templateName { - return &node + if boundaryNode.TemplateName != "" { + templateName := boundaryNode.TemplateName + for _, node := range nodes { + if node.Type == wfv1.NodeTypeRetry && node.TemplateName == templateName { + return &node + } } } + if boundaryNode.TemplateRef != nil { + templateRef := boundaryNode.TemplateRef + for _, node := range nodes { + if node.Type == wfv1.NodeTypeRetry && node.TemplateRef != nil && node.TemplateRef.Name == templateRef.Name && node.TemplateRef.Template == templateRef.Template { + return &node + } + } + } + return nil } diff --git a/workflow/controller/retry_tweak_test.go b/workflow/controller/retry_tweak_test.go index 77403c146bbf..66e5d9d106c7 100644 --- a/workflow/controller/retry_tweak_test.go +++ b/workflow/controller/retry_tweak_test.go @@ -59,6 +59,25 @@ func TestFindRetryNode(t *testing.T) { Children: []string{}, TemplateName: "tmpl2", }, + "E1": wfv1.NodeStatus{ + ID: "E1", + Type: wfv1.NodeTypeRetry, + Phase: wfv1.NodeRunning, + BoundaryID: "A1", + Children: []string{}, + TemplateRef: &wfv1.TemplateRef{ + Name: "tmpl1", + Template: "tmpl3", + }, + }, + "E2": wfv1.NodeStatus{ + ID: "E2", + Type: wfv1.NodeTypePod, + Phase: wfv1.NodeRunning, + BoundaryID: "E1", + Children: []string{}, + TemplateName: "tmpl2", + }, } t.Run("Expect to find retry node", func(t *testing.T) { node := allNodes["B2"] @@ -68,4 +87,8 @@ func TestFindRetryNode(t *testing.T) { a := FindRetryNode(allNodes, "A1") assert.Nil(t, a) }) + t.Run("Expect to find retry node has TemplateRef", func(t *testing.T) { + node := allNodes["E1"] + assert.Equal(t, FindRetryNode(allNodes, "E2"), &node) + }) } From 3d4a2cbd6d7d4a0829d7f6ef8e46788c6e244489 Mon Sep 17 00:00:00 2001 From: panicboat Date: Tue, 20 Feb 2024 17:13:30 +0900 Subject: [PATCH 02/38] fix: Add limit to number of Workflows in CronWorkflow history (#12681) Signed-off-by: panicboat Co-authored-by: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Signed-off-by: Isitha Subasinghe --- .../components/cron-workflow-details/cron-workflow-details.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/app/cron-workflows/components/cron-workflow-details/cron-workflow-details.tsx b/ui/src/app/cron-workflows/components/cron-workflow-details/cron-workflow-details.tsx index 89b13b5e0e3a..d1d2a5ac9172 100644 --- a/ui/src/app/cron-workflows/components/cron-workflow-details/cron-workflow-details.tsx +++ 
b/ui/src/app/cron-workflows/components/cron-workflow-details/cron-workflow-details.tsx
@@ -71,7 +71,7 @@ export function CronWorkflowDetails({match, location, history}: RouteComponentPr
 useEffect(() => {
 (async () => {
- const workflowList = await services.workflows.list(namespace, null, [`${models.labels.cronWorkflow}=${name}`], null);
+ const workflowList = await services.workflows.list(namespace, null, [`${models.labels.cronWorkflow}=${name}`], {limit: 50});
 const workflowsInfo = await services.info.getInfo();
 setWorkflows(workflowList.items);

From ea753f097db03eb057bb54e78d9a8f45b1d924d8 Mon Sep 17 00:00:00 2001
From: jswxstw <385920199@qq.com>
Date: Sun, 18 Feb 2024 09:19:25 +0800
Subject: [PATCH 03/38] fix: Patch taskset with subresources to delete completed node status.… (#12620)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: oninowang
Co-authored-by: jswxstw
Co-authored-by: Isitha Subasinghe
Signed-off-by: Isitha Subasinghe
---
 test/e2e/executor_plugins_test.go | 7 ++++++
 test/e2e/fixtures/e2e_suite.go | 3 +++
 test/e2e/fixtures/given.go | 2 ++
 test/e2e/fixtures/then.go | 13 +++++++++++
 test/e2e/fixtures/when.go | 3 +++
 workflow/controller/controller.go | 6 -----
 workflow/controller/taskset.go | 36 ++++++++++++++---------------
 workflow/controller/taskset_test.go | 5 ----
 8 files changed, 46 insertions(+), 29 deletions(-)

diff --git a/test/e2e/executor_plugins_test.go b/test/e2e/executor_plugins_test.go
index 2506a479149e..081346bf1b74 100644
--- a/test/e2e/executor_plugins_test.go
+++ b/test/e2e/executor_plugins_test.go
@@ -14,6 +14,7 @@ import (
 wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
 "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures"
+ "github.com/argoproj/argo-workflows/v3/workflow/common"
 )
 type ExecutorPluginsSuite struct {
@@ -76,6 +77,12 @@ func (s *ExecutorPluginsSuite) TestTemplateExecutor() {
 }
 }
 }
+ }).
+ ExpectWorkflowTaskSet(func(t *testing.T, wfts *wfv1.WorkflowTaskSet) { + assert.NotNil(t, wfts) + assert.Len(t, wfts.Spec.Tasks, 0) + assert.Len(t, wfts.Status.Nodes, 0) + assert.Equal(t, "true", wfts.Labels[common.LabelKeyCompleted]) }) } diff --git a/test/e2e/fixtures/e2e_suite.go b/test/e2e/fixtures/e2e_suite.go index 960526309e7e..f8b06aefa01b 100644 --- a/test/e2e/fixtures/e2e_suite.go +++ b/test/e2e/fixtures/e2e_suite.go @@ -51,6 +51,7 @@ type E2ESuite struct { wfClient v1alpha1.WorkflowInterface wfebClient v1alpha1.WorkflowEventBindingInterface wfTemplateClient v1alpha1.WorkflowTemplateInterface + wftsClient v1alpha1.WorkflowTaskSetInterface cwfTemplateClient v1alpha1.ClusterWorkflowTemplateInterface cronClient v1alpha1.CronWorkflowInterface KubeClient kubernetes.Interface @@ -74,6 +75,7 @@ func (s *E2ESuite) SetupSuite() { s.wfClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().Workflows(Namespace) s.wfebClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().WorkflowEventBindings(Namespace) s.wfTemplateClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().WorkflowTemplates(Namespace) + s.wftsClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().WorkflowTaskSets(Namespace) s.cronClient = versioned.NewForConfigOrDie(s.RestConfig).ArgoprojV1alpha1().CronWorkflows(Namespace) s.Persistence = newPersistence(s.KubeClient, s.Config) s.hydrator = hydrator.New(s.Persistence.offloadNodeStatusRepo) @@ -210,6 +212,7 @@ func (s *E2ESuite) Given() *Given { client: s.wfClient, wfebClient: s.wfebClient, wfTemplateClient: s.wfTemplateClient, + wftsClient: s.wftsClient, cwfTemplateClient: s.cwfTemplateClient, cronClient: s.cronClient, hydrator: s.hydrator, diff --git a/test/e2e/fixtures/given.go b/test/e2e/fixtures/given.go index 863df310a80c..fb6de9cf1b9a 100644 --- a/test/e2e/fixtures/given.go +++ b/test/e2e/fixtures/given.go @@ -23,6 +23,7 @@ type Given struct { client v1alpha1.WorkflowInterface wfebClient v1alpha1.WorkflowEventBindingInterface wfTemplateClient v1alpha1.WorkflowTemplateInterface + wftsClient v1alpha1.WorkflowTaskSetInterface cwfTemplateClient v1alpha1.ClusterWorkflowTemplateInterface cronClient v1alpha1.CronWorkflowInterface hydrator hydrator.Interface @@ -223,6 +224,7 @@ func (g *Given) When() *When { client: g.client, wfebClient: g.wfebClient, wfTemplateClient: g.wfTemplateClient, + wftsClient: g.wftsClient, cwfTemplateClient: g.cwfTemplateClient, cronClient: g.cronClient, hydrator: g.hydrator, diff --git a/test/e2e/fixtures/then.go b/test/e2e/fixtures/then.go index 191f83d2207b..a742880ca0cf 100644 --- a/test/e2e/fixtures/then.go +++ b/test/e2e/fixtures/then.go @@ -28,6 +28,7 @@ type Then struct { wf *wfv1.Workflow cronWf *wfv1.CronWorkflow client v1alpha1.WorkflowInterface + wftsClient v1alpha1.WorkflowTaskSetInterface cronClient v1alpha1.CronWorkflowInterface hydrator hydrator.Interface kubeClient kubernetes.Interface @@ -262,6 +263,17 @@ func (t *Then) ExpectPods(f func(t *testing.T, pods []apiv1.Pod)) *Then { return t } +func (t *Then) ExpectWorkflowTaskSet(block func(t *testing.T, wfts *wfv1.WorkflowTaskSet)) *Then { + t.t.Helper() + ctx := context.Background() + wfts, err := t.wftsClient.Get(ctx, t.wf.Name, metav1.GetOptions{}) + if err != nil { + t.t.Fatal(err) + } + block(t.t, wfts) + return t +} + func (t *Then) RunCli(args []string, block func(t *testing.T, output string, err error)) *Then { t.t.Helper() output, err := Exec("../../dist/argo", append([]string{"-n", Namespace}, args...)...) 
@@ -273,6 +285,7 @@ func (t *Then) When() *When { return &When{ t: t.t, client: t.client, + wftsClient: t.wftsClient, cronClient: t.cronClient, hydrator: t.hydrator, wf: t.wf, diff --git a/test/e2e/fixtures/when.go b/test/e2e/fixtures/when.go index 0881cd3f9da2..eb3d5dc7c48b 100644 --- a/test/e2e/fixtures/when.go +++ b/test/e2e/fixtures/when.go @@ -33,6 +33,7 @@ type When struct { client v1alpha1.WorkflowInterface wfebClient v1alpha1.WorkflowEventBindingInterface wfTemplateClient v1alpha1.WorkflowTemplateInterface + wftsClient v1alpha1.WorkflowTaskSetInterface cwfTemplateClient v1alpha1.ClusterWorkflowTemplateInterface cronClient v1alpha1.CronWorkflowInterface hydrator hydrator.Interface @@ -621,6 +622,7 @@ func (w *When) Then() *Then { wf: w.wf, cronWf: w.cronWf, client: w.client, + wftsClient: w.wftsClient, cronClient: w.cronClient, hydrator: w.hydrator, kubeClient: w.kubeClient, @@ -634,6 +636,7 @@ func (w *When) Given() *Given { client: w.client, wfebClient: w.wfebClient, wfTemplateClient: w.wfTemplateClient, + wftsClient: w.wftsClient, cwfTemplateClient: w.cwfTemplateClient, cronClient: w.cronClient, hydrator: w.hydrator, diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index dcd0ad6d7444..b0f8b7d1d409 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -801,12 +801,6 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { startTime := time.Now() woc.operate(ctx) wfc.metrics.OperationCompleted(time.Since(startTime).Seconds()) - if woc.wf.Status.Fulfilled() { - err := woc.completeTaskSet(ctx) - if err != nil { - log.WithError(err).Warn("error to complete the taskset") - } - } // TODO: operate should return error if it was unable to operate properly // so we can requeue the work for a later time diff --git a/workflow/controller/taskset.go b/workflow/controller/taskset.go index a239ceca1c6c..1aaa14752180 100644 --- a/workflow/controller/taskset.go +++ b/workflow/controller/taskset.go @@ -19,19 +19,19 @@ import ( controllercache "github.com/argoproj/argo-workflows/v3/workflow/controller/cache" ) -func (woc *wfOperationCtx) patchTaskSet(ctx context.Context, patch interface{}, pathTypeType types.PatchType) error { +func (woc *wfOperationCtx) mergePatchTaskSet(ctx context.Context, patch interface{}, subresources ...string) error { patchByte, err := json.Marshal(patch) if err != nil { return errors.InternalWrapError(err) } - _, err = woc.controller.wfclientset.ArgoprojV1alpha1().WorkflowTaskSets(woc.wf.Namespace).Patch(ctx, woc.wf.Name, pathTypeType, patchByte, metav1.PatchOptions{}) + _, err = woc.controller.wfclientset.ArgoprojV1alpha1().WorkflowTaskSets(woc.wf.Namespace).Patch(ctx, woc.wf.Name, types.MergePatchType, patchByte, metav1.PatchOptions{}, subresources...) 
if err != nil { return fmt.Errorf("failed patching taskset: %v", err) } return nil } -func (woc *wfOperationCtx) getDeleteTaskAndNodePatch() map[string]interface{} { +func (woc *wfOperationCtx) getDeleteTaskAndNodePatch() (tasksPatch map[string]interface{}, nodesPatch map[string]interface{}) { deletedNode := make(map[string]interface{}) for _, node := range woc.wf.Status.Nodes { if (node.Type == wfv1.NodeTypeHTTP || node.Type == wfv1.NodeTypePlugin) && node.Fulfilled() { @@ -40,15 +40,17 @@ func (woc *wfOperationCtx) getDeleteTaskAndNodePatch() map[string]interface{} { } // Delete the completed Tasks and nodes status - patch := map[string]interface{}{ + tasksPatch = map[string]interface{}{ "spec": map[string]interface{}{ "tasks": deletedNode, }, + } + nodesPatch = map[string]interface{}{ "status": map[string]interface{}{ "nodes": deletedNode, }, } - return patch + return } func taskSetNode(n wfv1.NodeStatus) bool { return n.Type == wfv1.NodeTypeHTTP || n.Type == wfv1.NodeTypePlugin @@ -62,20 +64,18 @@ func (woc *wfOperationCtx) removeCompletedTaskSetStatus(ctx context.Context) err if !woc.hasTaskSetNodes() { return nil } - return woc.patchTaskSet(ctx, woc.getDeleteTaskAndNodePatch(), types.MergePatchType) -} - -func (woc *wfOperationCtx) completeTaskSet(ctx context.Context) error { - if !woc.hasTaskSetNodes() { - return nil + tasksPatch, nodesPatch := woc.getDeleteTaskAndNodePatch() + if woc.wf.Status.Fulfilled() { + tasksPatch["metadata"] = metav1.ObjectMeta{ + Labels: map[string]string{ + common.LabelKeyCompleted: "true", + }, + } } - patch := woc.getDeleteTaskAndNodePatch() - patch["metadata"] = metav1.ObjectMeta{ - Labels: map[string]string{ - common.LabelKeyCompleted: "true", - }, + if err := woc.mergePatchTaskSet(ctx, nodesPatch, "status"); err != nil { + return err } - return woc.patchTaskSet(ctx, patch, types.MergePatchType) + return woc.mergePatchTaskSet(ctx, tasksPatch) } func (woc *wfOperationCtx) getWorkflowTaskSet() (*wfv1.WorkflowTaskSet, error) { @@ -202,7 +202,7 @@ func (woc *wfOperationCtx) createTaskSet(ctx context.Context) error { "spec": wfv1.WorkflowTaskSetSpec{Tasks: woc.taskSet}, } // patch the new templates into taskset - err = woc.patchTaskSet(ctx, spec, types.MergePatchType) + err = woc.mergePatchTaskSet(ctx, spec) if err != nil { woc.log.WithError(err).Error("Failed to patch WorkflowTaskSet") return fmt.Errorf("failed to patch TaskSet. 
%v", err) diff --git a/workflow/controller/taskset_test.go b/workflow/controller/taskset_test.go index 0090377284bc..5d8255179580 100644 --- a/workflow/controller/taskset_test.go +++ b/workflow/controller/taskset_test.go @@ -323,11 +323,6 @@ func TestNonHTTPTemplateScenario(t *testing.T) { err := woc.reconcileTaskSet(ctx) assert.NoError(t, err) }) - t.Run("completeTaskSet", func(t *testing.T) { - woc.operate(ctx) - err := woc.completeTaskSet(ctx) - assert.NoError(t, err) - }) t.Run("removeCompletedTaskSetStatus", func(t *testing.T) { woc.operate(ctx) err := woc.removeCompletedTaskSetStatus(ctx) From 8207a08900b9e7433d5ae939c44a08c065db5f7b Mon Sep 17 00:00:00 2001 From: jiangjiang <86391540+googs1025@users.noreply.github.com> Date: Sat, 17 Feb 2024 14:26:44 +0800 Subject: [PATCH 04/38] fix(typo): fix some typo (#12673) Signed-off-by: googs1025 Co-authored-by: Julie Vogelman Signed-off-by: Isitha Subasinghe --- server/auth/types/claims.go | 6 +++--- server/auth/types/claims_test.go | 4 ++-- server/workflow/workflow_server.go | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/server/auth/types/claims.go b/server/auth/types/claims.go index 5b2f3ec237ea..f4d7c8382cc0 100644 --- a/server/auth/types/claims.go +++ b/server/auth/types/claims.go @@ -61,19 +61,19 @@ func (c *Claims) UnmarshalJSON(data []byte) error { func (c *Claims) GetCustomGroup(customKeyName string) ([]string, error) { groups, ok := c.RawClaim[customKeyName] if !ok { - return nil, fmt.Errorf("No claim found for key: %v", customKeyName) + return nil, fmt.Errorf("no claim found for key: %v", customKeyName) } sliceInterface, ok := groups.([]interface{}) if !ok { - return nil, fmt.Errorf("Expected an array, got %v", groups) + return nil, fmt.Errorf("expected an array, got %v", groups) } newSlice := []string{} for _, a := range sliceInterface { val, ok := a.(string) if !ok { - return nil, fmt.Errorf("Group name %v was not a string", a) + return nil, fmt.Errorf("group name %v was not a string", a) } newSlice = append(newSlice, val) } diff --git a/server/auth/types/claims_test.go b/server/auth/types/claims_test.go index bc49413aae57..047131e6ce01 100644 --- a/server/auth/types/claims_test.go +++ b/server/auth/types/claims_test.go @@ -159,7 +159,7 @@ func TestGetCustomGroup(t *testing.T) { claims := &Claims{} _, err := claims.GetCustomGroup(("ad_groups")) if assert.Error(t, err) { - assert.EqualError(t, err, "No claim found for key: ad_groups") + assert.EqualError(t, err, "no claim found for key: ad_groups") } }) t.Run("CustomGroupSet", func(t *testing.T) { @@ -187,7 +187,7 @@ func TestGetCustomGroup(t *testing.T) { }} _, err := claims.GetCustomGroup(("ad_groups")) if assert.Error(t, err) { - assert.EqualError(t, err, "Group name 0 was not a string") + assert.EqualError(t, err, "group name 0 was not a string") } }) t.Run("CustomGroupNotSlice", func(t *testing.T) { diff --git a/server/workflow/workflow_server.go b/server/workflow/workflow_server.go index 88fce952bafe..16fcc3b8c7d3 100644 --- a/server/workflow/workflow_server.go +++ b/server/workflow/workflow_server.go @@ -335,7 +335,7 @@ func (s *workflowServer) WatchEvents(req *workflowpkg.WatchEventsRequest, ws wor log.Debug("Received event") e, ok := event.Object.(*corev1.Event) if !ok { - // object is probably probably metav1.Status, `FromObject` can deal with anything + // object is probably metav1.Status, `FromObject` can deal with anything return sutils.ToStatusError(apierr.FromObject(event.Object), codes.Internal) } log.Debug("Sending event") @@ -665,7 +665,7 
@@ func getLatestWorkflow(ctx context.Context, wfClient versioned.Interface, namesp return nil, sutils.ToStatusError(err, codes.Internal) } if len(wfList.Items) < 1 { - return nil, sutils.ToStatusError(fmt.Errorf("No workflows found."), codes.NotFound) + return nil, sutils.ToStatusError(fmt.Errorf("no workflows found"), codes.NotFound) } latest := wfList.Items[0] for _, wf := range wfList.Items { From 715791b17bc92e3880f14fffea020ecb5af44d85 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Sat, 17 Feb 2024 01:23:38 -0500 Subject: [PATCH 05/38] fix(ui): `ListWatch` should not _both_ set and depend on `nextOffset` (#12672) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe --- ui/src/app/shared/components/pagination-panel.tsx | 2 +- .../workflows/components/workflows-list/workflows-list.tsx | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ui/src/app/shared/components/pagination-panel.tsx b/ui/src/app/shared/components/pagination-panel.tsx index 8c21a92cf21d..b7e2e30066b1 100644 --- a/ui/src/app/shared/components/pagination-panel.tsx +++ b/ui/src/app/shared/components/pagination-panel.tsx @@ -39,7 +39,7 @@ export function PaginationPanel(props: {pagination: Pagination; onChange: (pagin // we should not skip any by setting an offset. // The offset must be initialized whenever the pagination limit is changed. if (limit) { - newValue.offset = ''; + newValue.offset = undefined; } props.onChange(newValue); diff --git a/ui/src/app/workflows/components/workflows-list/workflows-list.tsx b/ui/src/app/workflows/components/workflows-list/workflows-list.tsx index bf951f927ca3..a44d849d9e14 100644 --- a/ui/src/app/workflows/components/workflows-list/workflows-list.tsx +++ b/ui/src/app/workflows/components/workflows-list/workflows-list.tsx @@ -54,9 +54,9 @@ export function WorkflowsList({match, location, history}: RouteComponentProps(() => { - const savedPaginationLimit = storage.getItem('options', {}).paginationLimit || 0; + const savedPaginationLimit = storage.getItem('options', {}).paginationLimit || undefined; return { - offset: queryParams.get('name'), + offset: queryParams.get('offset') || undefined, limit: parseLimit(queryParams.get('limit')) || savedPaginationLimit || 50 }; }); @@ -155,7 +155,7 @@ export function WorkflowsList({match, location, history}: RouteComponentProps Date: Fri, 16 Feb 2024 23:22:57 -0500 Subject: [PATCH 06/38] fix(controller): re-allow changing executor `args` (#12609) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe --- workflow/controller/workflowpod.go | 1 + workflow/controller/workflowpod_test.go | 24 ++++++++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/workflow/controller/workflowpod.go b/workflow/controller/workflowpod.go index b41593402994..c53d32b2303f 100644 --- a/workflow/controller/workflowpod.go +++ b/workflow/controller/workflowpod.go @@ -604,6 +604,7 @@ func (woc *wfOperationCtx) newExecContainer(name string, tmpl *wfv1.Template) *a Env: woc.createEnvVars(), Resources: woc.controller.Config.GetExecutor().Resources, SecurityContext: woc.controller.Config.GetExecutor().SecurityContext, + Args: woc.controller.Config.GetExecutor().Args, } // lock down resource pods by default if tmpl.GetType() == wfv1.TemplateTypeResource && exec.SecurityContext == nil { diff --git a/workflow/controller/workflowpod_test.go b/workflow/controller/workflowpod_test.go index 2a5d2344a4cb..04e75598f0a7 100644 --- a/workflow/controller/workflowpod_test.go +++ 
b/workflow/controller/workflowpod_test.go @@ -326,8 +326,8 @@ func TestTmplLevelExecutorServiceAccountName(t *testing.T) { verifyServiceAccountTokenVolumeMount(t, waitCtr, "exec-sa-token", "/var/run/secrets/kubernetes.io/serviceaccount") } -// TestTmplLevelExecutorServiceAccountName verifies the ability to carry forward template level AutomountServiceAccountToken to Podspec. -func TestTmplLevelExecutorSecurityContext(t *testing.T) { +// TestCtrlLevelExecutorSecurityContext verifies the ability to carry forward Controller level SecurityContext to Podspec. +func TestCtrlLevelExecutorSecurityContext(t *testing.T) { var user int64 = 1000 ctx := context.Background() woc := newWoc() @@ -1490,6 +1490,26 @@ func TestMainContainerCustomization(t *testing.T) { }) } +func TestExecutorContainerCustomization(t *testing.T) { + woc := newWoc() + woc.controller.Config.Executor = &apiv1.Container{ + Args: []string{"foo"}, + Resources: apiv1.ResourceRequirements{ + Limits: apiv1.ResourceList{ + apiv1.ResourceCPU: resource.MustParse("0.900"), + apiv1.ResourceMemory: resource.MustParse("512Mi"), + }, + }, + } + + pod, err := woc.createWorkflowPod(context.Background(), "", nil, &wfv1.Template{}, &createWorkflowPodOpts{}) + assert.NoError(t, err) + waitCtr := pod.Spec.Containers[0] + assert.Equal(t, []string{"foo"}, waitCtr.Args) + assert.Equal(t, "0.900", waitCtr.Resources.Limits.Cpu().AsDec().String()) + assert.Equal(t, "536870912", waitCtr.Resources.Limits.Memory().AsDec().String()) +} + var helloWindowsWf = ` apiVersion: argoproj.io/v1alpha1 kind: Workflow From c95c6abc510a42dbae2bb8e929589cfb99c811f4 Mon Sep 17 00:00:00 2001 From: Ryan Currah Date: Thu, 15 Feb 2024 11:32:06 -0500 Subject: [PATCH 07/38] fix(controller): add missing namespace index from workflow informer (#12666) Signed-off-by: Ryan Currah Signed-off-by: Isitha Subasinghe --- workflow/controller/controller.go | 1 + 1 file changed, 1 insertion(+) diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index b0f8b7d1d409..9b4555faa209 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -254,6 +254,7 @@ var indexers = cache.Indexers{ indexes.WorkflowPhaseIndex: indexes.MetaWorkflowPhaseIndexFunc(), indexes.ConditionsIndex: indexes.ConditionsIndexFunc, indexes.UIDIndex: indexes.MetaUIDFunc, + cache.NamespaceIndex: cache.MetaNamespaceIndexFunc, } // Run starts an Workflow resource controller From 9b69363ba62fa76ac994c1d8542904b4fd331d53 Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Wed, 14 Feb 2024 09:42:26 +0800 Subject: [PATCH 08/38] fix: retry node with expression status Running -> Pending (#12637) Signed-off-by: shuangkun Signed-off-by: Isitha Subasinghe --- workflow/controller/operator.go | 2 +- workflow/controller/operator_test.go | 80 ++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index f5cf3a428895..8113de7a6ca6 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -948,7 +948,7 @@ func (woc *wfOperationCtx) processNodeRetries(node *wfv1.NodeStatus, retryStrate if err != nil { return nil, false, err } - if !shouldContinue { + if !shouldContinue && lastChildNode.Fulfilled() { return woc.markNodePhase(node.Name, lastChildNode.Phase, "retryStrategy.expression evaluated to false"), true, nil } } diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 
c2922a20ff7b..4385043d6006 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -876,6 +876,86 @@ func TestProcessNodeRetriesWithExponentialBackoff(t *testing.T) { require.Equal(wfv1.NodeSucceeded, n.Phase) } +// TestProcessNodeRetries tests retrying with Expression +func TestProcessNodeRetriesWithExpression(t *testing.T) { + cancel, controller := newController() + defer cancel() + assert.NotNil(t, controller) + wf := wfv1.MustUnmarshalWorkflow(helloWorldWf) + assert.NotNil(t, wf) + woc := newWorkflowOperationCtx(wf, controller) + assert.NotNil(t, woc) + // Verify that there are no nodes in the wf status. + assert.Zero(t, len(woc.wf.Status.Nodes)) + + // Add the parent node for retries. + nodeName := "test-node" + nodeID := woc.wf.NodeID(nodeName) + node := woc.initializeNode(nodeName, wfv1.NodeTypeRetry, "", &wfv1.WorkflowStep{}, "", wfv1.NodeRunning, &wfv1.NodeFlag{}) + retries := wfv1.RetryStrategy{} + retries.Expression = "false" + retries.Limit = intstrutil.ParsePtr("2") + retries.RetryPolicy = wfv1.RetryPolicyAlways + woc.wf.Status.Nodes[nodeID] = *node + + assert.Equal(t, node.Phase, wfv1.NodeRunning) + + // Ensure there are no child nodes yet. + lastChild := getChildNodeIndex(node, woc.wf.Status.Nodes, -1) + assert.Nil(t, lastChild) + + // Add child nodes. + for i := 0; i < 2; i++ { + childNode := fmt.Sprintf("%s(%d)", nodeName, i) + woc.initializeNode(childNode, wfv1.NodeTypePod, "", &wfv1.WorkflowStep{}, "", wfv1.NodeRunning, &wfv1.NodeFlag{Retried: true}) + woc.addChildNode(nodeName, childNode) + } + + n, err := woc.wf.GetNodeByName(nodeName) + assert.NoError(t, err) + lastChild = getChildNodeIndex(n, woc.wf.Status.Nodes, -1) + assert.NotNil(t, lastChild) + + // Last child is still running. processNodeRetries() should return false since + // there should be no retries at this point. + n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) + assert.NoError(t, err) + assert.Equal(t, n.Phase, wfv1.NodeRunning) + + // Mark lastChild Pending. + woc.markNodePhase(lastChild.Name, wfv1.NodePending) + n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) + assert.Nil(t, err) + assert.Equal(t, n.Phase, wfv1.NodeRunning) + + // Mark lastChild as successful. + woc.markNodePhase(lastChild.Name, wfv1.NodeSucceeded) + n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) + assert.Nil(t, err) + // The parent node also gets marked as Succeeded. + assert.Equal(t, n.Phase, wfv1.NodeSucceeded) + + // Mark the parent node as running again and the lastChild as errored. + n = woc.markNodePhase(n.Name, wfv1.NodeRunning) + woc.markNodePhase(lastChild.Name, wfv1.NodeError) + _, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) + assert.NoError(t, err) + n, err = woc.wf.GetNodeByName(nodeName) + assert.NoError(t, err) + assert.Equal(t, n.Phase, wfv1.NodeError) + + // Add a third node that has failed. 
+ woc.markNodePhase(n.Name, wfv1.NodeRunning) + childNode := fmt.Sprintf("%s(%d)", nodeName, 3) + woc.initializeNode(childNode, wfv1.NodeTypePod, "", &wfv1.WorkflowStep{}, "", wfv1.NodeFailed, &wfv1.NodeFlag{Retried: true}) + woc.addChildNode(nodeName, childNode) + n, err = woc.wf.GetNodeByName(nodeName) + assert.NoError(t, err) + n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{}) + assert.NoError(t, err) + assert.Equal(t, n.Phase, wfv1.NodeFailed) +} + func parseRetryMessage(message string) (int, error) { pattern := regexp.MustCompile(`Backoff for (\d+) minutes (\d+) seconds`) matches := pattern.FindStringSubmatch(message) From 93f0b6ebd6757c2f4957cbe151061c7848e68d57 Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Fri, 9 Feb 2024 12:00:04 +0800 Subject: [PATCH 09/38] fix: pass through burst and qps for auth.kubeclient (#12575) Signed-off-by: shuangkun Signed-off-by: Isitha Subasinghe --- server/auth/gatekeeper.go | 17 +++++++++++++---- server/auth/gatekeeper_test.go | 18 +++++++++--------- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/server/auth/gatekeeper.go b/server/auth/gatekeeper.go index 011dd6bcb98e..315a87f490c3 100644 --- a/server/auth/gatekeeper.go +++ b/server/auth/gatekeeper.go @@ -55,7 +55,7 @@ type Gatekeeper interface { StreamServerInterceptor() grpc.StreamServerInterceptor } -type ClientForAuthorization func(authorization string) (*rest.Config, *servertypes.Clients, error) +type ClientForAuthorization func(authorization string, config *rest.Config) (*rest.Config, *servertypes.Clients, error) type gatekeeper struct { Modes Modes @@ -194,7 +194,7 @@ func (s gatekeeper) getClients(ctx context.Context, req interface{}) (*servertyp } switch mode { case Client: - restConfig, clients, err := s.clientForAuthorization(authorization) + restConfig, clients, err := s.clientForAuthorization(authorization, s.restConfig) if err != nil { return nil, nil, status.Error(codes.Unauthenticated, err.Error()) } @@ -286,7 +286,7 @@ func (s *gatekeeper) getClientsForServiceAccount(ctx context.Context, claims *ty if err != nil { return nil, err } - _, clients, err := s.clientForAuthorization(authorization) + _, clients, err := s.clientForAuthorization(authorization, s.restConfig) if err != nil { return nil, err } @@ -337,11 +337,12 @@ func addClaimsLogFields(claims *types.Claims, fields log.Fields) log.Fields { return fields } -func DefaultClientForAuthorization(authorization string) (*rest.Config, *servertypes.Clients, error) { +func DefaultClientForAuthorization(authorization string, config *rest.Config) (*rest.Config, *servertypes.Clients, error) { restConfig, err := kubeconfig.GetRestConfig(authorization) if err != nil { return nil, nil, fmt.Errorf("failed to create REST config: %w", err) } + restConfig = mergeServerRestConfig(config, restConfig) dynamicClient, err := dynamic.NewForConfig(restConfig) if err != nil { return nil, nil, fmt.Errorf("failure to create dynamic client: %w", err) @@ -370,3 +371,11 @@ func DefaultClientForAuthorization(authorization string) (*rest.Config, *servert Kubernetes: kubeClient, }, nil } + +func mergeServerRestConfig(argoServerConfig *rest.Config, newConfig *rest.Config) *rest.Config { + newConfig.Burst = argoServerConfig.Burst + newConfig.QPS = argoServerConfig.QPS + newConfig.UserAgent = argoServerConfig.UserAgent + // TO DO: Merge other common configurations,such as RateLimiter. 
+ return newConfig +} diff --git a/server/auth/gatekeeper_test.go b/server/auth/gatekeeper_test.go index b4c620a0392b..27d9f47db76a 100644 --- a/server/auth/gatekeeper_test.go +++ b/server/auth/gatekeeper_test.go @@ -107,7 +107,7 @@ func TestServer_GetWFClient(t *testing.T) { ) resourceCache := cache.NewResourceCache(kubeClient, corev1.NamespaceAll) resourceCache.Run(context.TODO().Done()) - var clientForAuthorization ClientForAuthorization = func(authorization string) (*rest.Config, *servertypes.Clients, error) { + var clientForAuthorization ClientForAuthorization = func(authorization string, config *rest.Config) (*rest.Config, *servertypes.Clients, error) { return &rest.Config{}, &servertypes.Clients{Workflow: &fakewfclientset.Clientset{}, Kubernetes: &kubefake.Clientset{}}, nil } clients := &servertypes.Clients{Workflow: wfClient, Kubernetes: kubeClient} @@ -153,7 +153,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Claims: jwt.Claims{Subject: "my-sub"}}, nil) ssoIf.On("IsRBACEnabled").Return(false) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) if assert.NoError(t, err) { ctx, err := g.Context(x("Bearer v2:whatever")) if assert.NoError(t, err) { @@ -172,7 +172,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) if assert.NoError(t, err) { ctx, err := g.Context(x("Bearer v2:whatever")) if assert.NoError(t, err) { @@ -193,7 +193,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) if assert.NoError(t, err) { ctx, err := g.ContextWithRequest(x("Bearer v2:whatever"), servertypes.NamespaceHolder("user1-ns")) if assert.NoError(t, err) { @@ -214,7 +214,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) if assert.NoError(t, err) { ctx, err := g.ContextWithRequest(x("Bearer v2:whatever"), servertypes.NamespaceHolder("user1-ns")) if 
assert.NoError(t, err) { @@ -235,7 +235,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) if assert.NoError(t, err) { ctx, err := g.ContextWithRequest(x("Bearer v2:whatever"), servertypes.NamespaceHolder("user2-ns")) if assert.NoError(t, err) { @@ -257,7 +257,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"my-group", "other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", false, resourceCache) if assert.NoError(t, err) { ctx, err := g.ContextWithRequest(x("Bearer v2:whatever"), servertypes.NamespaceHolder("user3-ns")) if assert.NoError(t, err) { @@ -278,7 +278,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{Groups: []string{"other-group"}}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) if assert.NoError(t, err) { ctx, err := g.Context(x("Bearer v2:whatever")) if assert.NoError(t, err) { @@ -291,7 +291,7 @@ func TestServer_GetWFClient(t *testing.T) { ssoIf := &ssomocks.Interface{} ssoIf.On("Authorize", mock.Anything, mock.Anything).Return(&types.Claims{}, nil) ssoIf.On("IsRBACEnabled").Return(true) - g, err := NewGatekeeper(Modes{SSO: true}, clients, nil, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) + g, err := NewGatekeeper(Modes{SSO: true}, clients, &rest.Config{Username: "my-username"}, ssoIf, clientForAuthorization, "my-ns", "my-ns", true, resourceCache) if assert.NoError(t, err) { _, err := g.Context(x("Bearer v2:whatever")) assert.EqualError(t, err, "rpc error: code = PermissionDenied desc = not allowed") From 636f79a8bddea8d021737104bc6d2e4be516e7f4 Mon Sep 17 00:00:00 2001 From: Tianchu Zhao Date: Thu, 8 Feb 2024 14:11:44 +1100 Subject: [PATCH 10/38] fix: artifact subdir error when using volumeMount (#12638) Signed-off-by: Isitha Subasinghe --- test/e2e/artifacts_test.go | 38 +++++++++++++++++++++++++++++++++++ workflow/executor/executor.go | 5 +++++ 2 files changed, 43 insertions(+) diff --git a/test/e2e/artifacts_test.go b/test/e2e/artifacts_test.go index 43b40a742c6d..2d0ff866709c 100644 --- a/test/e2e/artifacts_test.go +++ b/test/e2e/artifacts_test.go @@ -623,6 +623,44 @@ spec: WaitForWorkflow(fixtures.ToBeSucceeded) } +func (s *ArtifactsSuite) TestArtifactEphemeralVolume() { + s.Given(). 
+ Workflow(`apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-volume-claim- +spec: + entrypoint: artifact-volume-claim + volumeClaimTemplates: + - metadata: + name: vol + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Mi + templates: + - name: artifact-volume-claim + inputs: + artifacts: + - name: artifact-volume-claim + path: /tmp/input/input.txt + raw: + data: abc + container: + image: argoproj/argosay:v2 + command: [sh, -c] + args: ["ls -l"] + workingDir: /tmp + volumeMounts: + - name: vol + mountPath: /tmp +`). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded) +} + func TestArtifactsSuite(t *testing.T) { suite.Run(t, new(ArtifactsSuite)) } diff --git a/workflow/executor/executor.go b/workflow/executor/executor.go index 490db19b6de1..df6f77ce20b5 100644 --- a/workflow/executor/executor.go +++ b/workflow/executor/executor.go @@ -204,6 +204,11 @@ func (we *WorkflowExecutor) LoadArtifacts(ctx context.Context) error { // the file is a tarball or not. If it is, it is first extracted then renamed to // the desired location. If not, it is simply renamed to the location. tempArtPath := artPath + ".tmp" + // Ensure parent directory exist, create if missing + tempArtDir := filepath.Dir(tempArtPath) + if err := os.MkdirAll(tempArtDir, 0o700); err != nil { + return fmt.Errorf("failed to create artifact temporary parent directory %s: %w", tempArtDir, err) + } err = artDriver.Load(driverArt, tempArtPath) if err != nil { if art.Optional && argoerrs.IsCode(argoerrs.CodeNotFound, err) { From 0bffab1dd3971ae1c9adbc4a7c2ceb6969098678 Mon Sep 17 00:00:00 2001 From: Raffael <50194881+nice-pink@users.noreply.github.com> Date: Mon, 5 Feb 2024 20:58:07 +0100 Subject: [PATCH 11/38] fix: Allow valueFrom in dag arguments parameters. 
Fixes #11900 (#11902) Signed-off-by: nice-pink Signed-off-by: Isitha Subasinghe --- server/workflow/workflow_server_test.go | 2 +- workflow/controller/operator_test.go | 2 +- workflow/validate/validate.go | 20 ++++++-- workflow/validate/validate_dag_test.go | 67 ++++++++++++++++++++++++- workflow/validate/validate_test.go | 7 +-- 5 files changed, 87 insertions(+), 11 deletions(-) diff --git a/server/workflow/workflow_server_test.go b/server/workflow/workflow_server_test.go index d84b49495f92..73a68bb29fe1 100644 --- a/server/workflow/workflow_server_test.go +++ b/server/workflow/workflow_server_test.go @@ -929,7 +929,7 @@ func TestSubmitWorkflowFromResource(t *testing.T) { ResourceKind: "workflowtemplate", ResourceName: "workflow-template-whalesay-template", }) - assert.EqualError(t, err, "rpc error: code = InvalidArgument desc = spec.arguments.message.value is required") + assert.EqualError(t, err, "rpc error: code = InvalidArgument desc = spec.arguments.message.value or spec.arguments.message.valueFrom is required") }) t.Run("SubmitFromWorkflowTemplate", func(t *testing.T) { opts := v1alpha1.SubmitOpts{ diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 4385043d6006..d0f2aebbde63 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -4531,7 +4531,7 @@ func TestUnsuppliedArgValue(t *testing.T) { woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, woc.wf.Status.Conditions[0].Status, metav1.ConditionStatus("True")) - assert.Equal(t, woc.wf.Status.Message, "invalid spec: spec.arguments.missing.value is required") + assert.Equal(t, woc.wf.Status.Message, "invalid spec: spec.arguments.missing.value or spec.arguments.missing.valueFrom is required") } var suppliedArgValue = ` diff --git a/workflow/validate/validate.go b/workflow/validate/validate.go index 35acaab2ed9d..3c3d8866f20c 100644 --- a/workflow/validate/validate.go +++ b/workflow/validate/validate.go @@ -856,11 +856,24 @@ func validateArgumentsFieldNames(prefix string, arguments wfv1.Arguments) error // validateArgumentsValues ensures that all arguments have parameter values or artifact locations func validateArgumentsValues(prefix string, arguments wfv1.Arguments, allowEmptyValues bool) error { for _, param := range arguments.Parameters { + // check if any value is defined if param.ValueFrom == nil && param.Value == nil { if !allowEmptyValues { - return errors.Errorf(errors.CodeBadRequest, "%s%s.value is required", prefix, param.Name) + return errors.Errorf(errors.CodeBadRequest, "%s%s.value or %s%s.valueFrom is required", prefix, param.Name, prefix, param.Name) + } + } + if param.ValueFrom != nil { + // check for valid valueFrom sub-parameters + // INFO: default needs to be accompanied by ConfigMapKeyRef. 
+ if param.ValueFrom.ConfigMapKeyRef == nil && param.ValueFrom.Event == "" && param.ValueFrom.Supplied == nil { + return errors.Errorf(errors.CodeBadRequest, "%s%s.valueFrom only allows: default, configMapKeyRef and supplied", prefix, param.Name) + } + // check for invalid valueFrom sub-parameters + if param.ValueFrom.Path != "" || param.ValueFrom.JSONPath != "" || param.ValueFrom.Parameter != "" || param.ValueFrom.Expression != "" { + return errors.Errorf(errors.CodeBadRequest, "%s%s.valueFrom only allows: default, configMapKeyRef and supplied", prefix, param.Name) } } + // validate enum if param.Enum != nil { if len(param.Enum) == 0 { return errors.Errorf(errors.CodeBadRequest, "%s%s.enum should contain at least one value", prefix, param.Name) @@ -1423,10 +1436,7 @@ func validateDAGTaskArgumentDependency(arguments wfv1.Arguments, ancestry []stri } for _, param := range arguments.Parameters { - if param.Value == nil { - return errors.Errorf(errors.CodeBadRequest, "missing value for parameter '%s'", param.Name) - } - if strings.HasPrefix(param.Value.String(), "{{tasks.") { + if param.Value != nil && strings.HasPrefix(param.Value.String(), "{{tasks.") { // All parameter values should have been validated, so // index 1 should exist. refTaskName := strings.Split(param.Value.String(), ".")[1] diff --git a/workflow/validate/validate_dag_test.go b/workflow/validate/validate_dag_test.go index fcbb21968da9..1931016f06aa 100644 --- a/workflow/validate/validate_dag_test.go +++ b/workflow/validate/validate_dag_test.go @@ -1075,6 +1075,71 @@ spec: func TestDAGMissingParamValueInTask(t *testing.T) { err := validate(dagMissingParamValueInTask) if assert.NotNil(t, err) { - assert.Contains(t, err.Error(), "templates.root.tasks.task missing value for parameter 'data'") + assert.Contains(t, err.Error(), ".valueFrom only allows: default, configMapKeyRef and supplied") + } +} + +var dagArgParamValueFromConfigMapInTask = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +spec: + entrypoint: root + templates: + - name: template + inputs: + parameters: + - name: data + container: + name: main + image: alpine + - name: root + dag: + tasks: + - name: task + template: template + arguments: + parameters: + - name: data + valueFrom: + configMapKeyRef: + name: my-config + key: my-data + default: my-default +` + +func TestDAGArgParamValueFromConfigMapInTask(t *testing.T) { + err := validate(dagArgParamValueFromConfigMapInTask) + assert.NoError(t, err) +} + +var failDagArgParamValueFromPathInTask = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +spec: + entrypoint: root + templates: + - name: template + inputs: + parameters: + - name: data + container: + name: main + image: alpine + - name: root + dag: + tasks: + - name: task + template: template + arguments: + parameters: + - name: data + valueFrom: + path: /tmp/my-path +` + +func TestFailDAGArgParamValueFromPathInTask(t *testing.T) { + err := validate(failDagArgParamValueFromPathInTask) + if assert.NotNil(t, err) { + assert.Contains(t, err.Error(), "valueFrom only allows: default, configMapKeyRef and supplied") } } diff --git a/workflow/validate/validate_test.go b/workflow/validate/validate_test.go index d48a389c3a41..bae2a6bd630e 100644 --- a/workflow/validate/validate_test.go +++ b/workflow/validate/validate_test.go @@ -641,7 +641,7 @@ func TestGlobalParam(t *testing.T) { assert.NoError(t, err) err = validate(unsuppliedArgValue) - assert.EqualError(t, err, "spec.arguments.missing.value is required") + assert.EqualError(t, err, "spec.arguments.missing.value or 
spec.arguments.missing.valueFrom is required")
 }
 var invalidTemplateNames = `
@@ -1293,7 +1293,8 @@ spec:
 func TestInvalidArgumentNoValue(t *testing.T) {
 err := validate(invalidArgumentNoValue)
 if assert.NotNil(t, err) {
- assert.Contains(t, err.Error(), ".value is required")
+ assert.Contains(t, err.Error(), ".value or ")
+ assert.Contains(t, err.Error(), ".valueFrom is required")
 }
 }
@@ -2714,7 +2715,7 @@ func TestWorkflowTemplateWithEnumValueWithoutValue(t *testing.T) {
 err = validateWorkflowTemplate(workflowTeamplateWithEnumValuesWithoutValue, ValidateOpts{Lint: true})
 assert.Nil(t, err)
 err = validateWorkflowTemplate(workflowTeamplateWithEnumValuesWithoutValue, ValidateOpts{Submit: true})
- assert.EqualError(t, err, "spec.arguments.message.value is required")
+ assert.EqualError(t, err, "spec.arguments.message.value or spec.arguments.message.valueFrom is required")
 }
 var resourceManifestWithExpressions = `

From fcfbfbd0b5a1251e6cd0cb728131604c613dedc3 Mon Sep 17 00:00:00 2001
From: AloysAqemia <135111228+AloysAqemia@users.noreply.github.com>
Date: Mon, 5 Feb 2024 19:36:23 +0100
Subject: [PATCH 12/38] fix(resources): improve resource accounting. Fixes #12468 (#12492)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Aloÿs Augustin
Signed-off-by: Isitha Subasinghe
---
 util/resource/duration_test.go | 2 +-
 util/resource/summary.go | 7 ++++++-
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/util/resource/duration_test.go b/util/resource/duration_test.go
index 0330cb5c2bb9..8cb37836bec7 100644
--- a/util/resource/duration_test.go
+++ b/util/resource/duration_test.go
@@ -44,7 +44,7 @@ func TestDurationForPod(t *testing.T) {
 corev1.ResourceCPU: wfv1.NewResourceDuration(2 * time.Minute),
 corev1.ResourceMemory: wfv1.NewResourceDuration(1 * time.Minute),
 }},
- {"ContainerWithCPURequest", &corev1.Pod{
+ {"ContainerWithGPULimit", &corev1.Pod{
 Spec: corev1.PodSpec{Containers: []corev1.Container{{Name: "main", Resources: corev1.ResourceRequirements{
 Requests: corev1.ResourceList{
 corev1.ResourceCPU: resource.MustParse("2000m"),
diff --git a/util/resource/summary.go b/util/resource/summary.go
index 56fc5c88c8ef..aa176cf1dd4c 100644
--- a/util/resource/summary.go
+++ b/util/resource/summary.go
@@ -28,9 +28,14 @@ func (ss Summaries) Duration() wfv1.ResourcesDuration {
 // Add container states.
d := wfv1.ResourcesDuration{} for _, s := range ss { + // age is converted to seconds, otherwise the multiplication below is very likely to overflow age := int64(s.age().Seconds()) for n, q := range s.ResourceList { - d = d.Add(wfv1.ResourcesDuration{n: wfv1.NewResourceDuration(time.Duration(q.Value() * age / wfv1.ResourceQuantityDenominator(n).Value() * int64(time.Second)))}) + d = d.Add(wfv1.ResourcesDuration{ + n: wfv1.NewResourceDuration(time.Duration( + q.MilliValue()*age/wfv1.ResourceQuantityDenominator(n).MilliValue(), + ) * time.Second), + }) } } return d From 2a21d1445df644894f96d0af62d4d7688b93489b Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Fri, 26 Jan 2024 11:16:04 +0800 Subject: [PATCH 13/38] fix: Mark resource && data template report-outputs-completed true (#12544) Signed-off-by: shuangkun Signed-off-by: Isitha Subasinghe --- cmd/argoexec/commands/data.go | 29 +++++++++++++++++++++++++---- cmd/argoexec/commands/resource.go | 9 ++++++++- test/e2e/functional_test.go | 3 +++ test/e2e/resource_template_test.go | 3 +++ 4 files changed, 39 insertions(+), 5 deletions(-) diff --git a/cmd/argoexec/commands/data.go b/cmd/argoexec/commands/data.go index c25daf08645d..a01bb70cd173 100644 --- a/cmd/argoexec/commands/data.go +++ b/cmd/argoexec/commands/data.go @@ -3,6 +3,7 @@ package commands import ( "context" + log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) @@ -10,11 +11,31 @@ func NewDataCommand() *cobra.Command { command := cobra.Command{ Use: "data", Short: "Process data", - RunE: func(cmd *cobra.Command, args []string) error { - ctx := context.Background() - wfExecutor := initExecutor() - return wfExecutor.Data(ctx) + Run: func(cmd *cobra.Command, args []string) { + ctx := cmd.Context() + err := execData(ctx) + if err != nil { + log.Fatalf("%+v", err) + } }, } return &command } + +func execData(ctx context.Context) error { + wfExecutor := initExecutor() + + // Don't allow cancellation to impact capture of results, parameters, artifacts, or defers. + bgCtx := context.Background() + // Create a new empty (placeholder) task result with LabelKeyReportOutputsCompleted set to false. + wfExecutor.InitializeOutput(bgCtx) + defer wfExecutor.HandleError(bgCtx) + defer wfExecutor.FinalizeOutput(bgCtx) //Ensures the LabelKeyReportOutputsCompleted is set to true. + + err := wfExecutor.Data(ctx) + if err != nil { + wfExecutor.AddError(err) + return err + } + return nil +} diff --git a/cmd/argoexec/commands/resource.go b/cmd/argoexec/commands/resource.go index e5d8fcf32328..68b0583b6378 100644 --- a/cmd/argoexec/commands/resource.go +++ b/cmd/argoexec/commands/resource.go @@ -33,12 +33,19 @@ func NewResourceCommand() *cobra.Command { func execResource(ctx context.Context, action string) error { wfExecutor := initExecutor() - defer wfExecutor.HandleError(ctx) + + // Don't allow cancellation to impact capture of results, parameters, artifacts, or defers. + bgCtx := context.Background() + + wfExecutor.InitializeOutput(bgCtx) + defer wfExecutor.HandleError(bgCtx) + defer wfExecutor.FinalizeOutput(bgCtx) //Ensures the LabelKeyReportOutputsCompleted is set to true. 
err := wfExecutor.StageFiles() if err != nil { wfExecutor.AddError(err) return err } + isDelete := action == "delete" if isDelete && (wfExecutor.Template.Resource.SuccessCondition != "" || wfExecutor.Template.Resource.FailureCondition != "" || len(wfExecutor.Template.Outputs.Parameters) > 0) { err = fmt.Errorf("successCondition, failureCondition and outputs are not supported for delete action") diff --git a/test/e2e/functional_test.go b/test/e2e/functional_test.go index 30a522287876..76b270394fb1 100644 --- a/test/e2e/functional_test.go +++ b/test/e2e/functional_test.go @@ -908,6 +908,9 @@ func (s *FunctionalSuite) TestDataTransformation() { } assert.NotNil(t, status.Nodes.FindByDisplayName("process-artifact(0:foo/script.py)")) assert.NotNil(t, status.Nodes.FindByDisplayName("process-artifact(1:script.py)")) + for _, value := range status.TaskResultsCompletionStatus { + assert.True(t, value) + } }) } diff --git a/test/e2e/resource_template_test.go b/test/e2e/resource_template_test.go index 2a4916f85b2d..e8f2bbb49e9d 100644 --- a/test/e2e/resource_template_test.go +++ b/test/e2e/resource_template_test.go @@ -159,6 +159,9 @@ func (s *ResourceTemplateSuite) TestResourceTemplateWithOutputs() { assert.Equal(t, "my-pod", parameters[1].Value.String(), "metadata.name is capture for jq") } } + for _, value := range status.TaskResultsCompletionStatus { + assert.True(t, value) + } }) } From 56a59118541d79be7c4b3ba3feb2a67b4f9c900e Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Tue, 23 Jan 2024 08:52:39 -0500 Subject: [PATCH 14/38] fix(ui): clone the `ListWatch` callback array in `WorkflowsList` (#12562) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe --- .../app/workflows/components/workflows-list/workflows-list.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/app/workflows/components/workflows-list/workflows-list.tsx b/ui/src/app/workflows/components/workflows-list/workflows-list.tsx index a44d849d9e14..d6d390121aed 100644 --- a/ui/src/app/workflows/components/workflows-list/workflows-list.tsx +++ b/ui/src/app/workflows/components/workflows-list/workflows-list.tsx @@ -145,7 +145,7 @@ export function WorkflowsList({match, location, history}: RouteComponentProps setError(null), - newWorkflows => setWorkflows(newWorkflows), + newWorkflows => setWorkflows([...newWorkflows]), err => setError(err), sortByYouth ); From 0319b79d5e13217e86784f92be67524fed3b8af4 Mon Sep 17 00:00:00 2001 From: Garett MacGowan Date: Tue, 23 Jan 2024 00:55:02 -0500 Subject: [PATCH 15/38] fix: Global Artifact Passing. 
Fixes #12554 (#12559) Signed-off-by: Garett MacGowan Signed-off-by: Isitha Subasinghe --- test/e2e/argo_server_test.go | 12 +- test/e2e/artifacts_test.go | 87 ++++++++ .../testdata/artifact-workflow-stopped.yaml | 23 +- .../artifactgc/artgc-dag-wf-self-delete.yaml | 99 +++++++++ .../complex-global-artifact-passing.yaml | 205 ++++++++++++++++++ .../e2e/testdata/global-artifact-passing.yaml | 92 ++++++++ workflow/controller/taskresult.go | 2 - 7 files changed, 513 insertions(+), 7 deletions(-) create mode 100644 test/e2e/testdata/artifactgc/artgc-dag-wf-self-delete.yaml create mode 100644 test/e2e/testdata/complex-global-artifact-passing.yaml create mode 100644 test/e2e/testdata/global-artifact-passing.yaml diff --git a/test/e2e/argo_server_test.go b/test/e2e/argo_server_test.go index a969a2f30e7a..9e862ae1bc4f 100644 --- a/test/e2e/argo_server_test.go +++ b/test/e2e/argo_server_test.go @@ -1051,13 +1051,21 @@ func (s *ArgoServerSuite) TestArtifactServerArchivedStoppedWorkflow() { nodeID = status.Nodes.FindByDisplayName("create-artifact").ID }) - s.Run("GetArtifactByNodeID", func() { - s.e().GET("/artifact-files/argo/archived-workflows/{uid}/{nodeID}/outputs/artifact-creator", uid, nodeID). + s.Run("GetLocalArtifactByNodeID", func() { + s.e().GET("/artifact-files/argo/archived-workflows/{uid}/{nodeID}/outputs/local-artifact", uid, nodeID). Expect(). Status(200). Body(). Contains("testing") }) + + s.Run("GetGlobalArtifactByNodeID", func() { + s.e().GET("/artifact-files/argo/archived-workflows/{uid}/{nodeID}/outputs/global-artifact", uid, nodeID). + Expect(). + Status(200). + Body(). + Contains("testing global") + }) } // make sure we can download an artifact diff --git a/test/e2e/artifacts_test.go b/test/e2e/artifacts_test.go index 2d0ff866709c..cd9005d896e8 100644 --- a/test/e2e/artifacts_test.go +++ b/test/e2e/artifacts_test.go @@ -4,9 +4,11 @@ package e2e import ( + "bytes" "context" "fmt" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -60,6 +62,71 @@ func (s *ArtifactsSuite) TestArtifactPassing() { WaitForWorkflow(fixtures.ToBeSucceeded) } +type expectedArtifact struct { + key string + bucketName string + value string +} + +func (s *ArtifactsSuite) TestGlobalArtifactPassing() { + for _, tt := range []struct { + workflowFile string + expectedArtifact expectedArtifact + }{ + { + workflowFile: "@testdata/global-artifact-passing.yaml", + expectedArtifact: expectedArtifact{ + key: "globalArtifact", + bucketName: "my-bucket-3", + value: "01", + }, + }, + { + workflowFile: "@testdata/complex-global-artifact-passing.yaml", + expectedArtifact: expectedArtifact{ + key: "finalTestUpdate", + bucketName: "my-bucket-3", + value: "Updated testUpdate", + }, + }, + } { + then := s.Given(). + Workflow(tt.workflowFile). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded, time.Minute*2). + Then(). + ExpectWorkflow(func(t *testing.T, objectMeta *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + // Check the global artifact value and see if it equals the expected value. 
+ c, err := minio.New("localhost:9000", &minio.Options{ + Creds: credentials.NewStaticV4("admin", "password", ""), + }) + + if err != nil { + t.Error(err) + } + + object, err := c.GetObject(context.Background(), tt.expectedArtifact.bucketName, tt.expectedArtifact.key, minio.GetObjectOptions{}) + if err != nil { + t.Error(err) + } + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(object) + if err != nil { + t.Error(err) + } + value := buf.String() + + assert.Equal(t, tt.expectedArtifact.value, value) + }) + + then. + When(). + RemoveFinalizers(false) + } +} + type artifactState struct { key string bucketName string @@ -157,6 +224,26 @@ func (s *ArtifactsSuite) TestStoppedWorkflow() { } } +func (s *ArtifactsSuite) TestDeleteWorkflow() { + when := s.Given(). + Workflow("@testdata/artifactgc/artgc-dag-wf-self-delete.yaml"). + When(). + SubmitWorkflow() + + then := when. + WaitForWorkflow(fixtures.ToBeCompleted). + Then(). + ExpectWorkflow(func(t *testing.T, objectMeta *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + assert.Contains(t, objectMeta.Finalizers, common.FinalizerArtifactGC) + }) + + when = then.When() + + when.WaitForWorkflowDeletion() + + when.RemoveFinalizers(false) +} + func (s *ArtifactsSuite) TestArtifactGC() { s.Given(). diff --git a/test/e2e/testdata/artifact-workflow-stopped.yaml b/test/e2e/testdata/artifact-workflow-stopped.yaml index 051decd7e010..599b9823a135 100644 --- a/test/e2e/testdata/artifact-workflow-stopped.yaml +++ b/test/e2e/testdata/artifact-workflow-stopped.yaml @@ -45,7 +45,7 @@ spec: while [ $x -le 60 ] do sleep 1 - if [ -f "/mnt/vol/test.txt" ]; then + if [ -f "/mnt/vol/test.txt" ] && [ -f "/mnt/vol/globaltest.txt" ]; then echo "Artifact found in shared volume" break fi @@ -73,6 +73,7 @@ spec: args: - | echo 'testing' > /mnt/vol/test.txt + echo 'testing global' > /mnt/vol/globaltest.txt echo "Artifact saved to /mnt/vol/test.txt" echo "Pretending to continue to do work." 
ls /mnt @@ -82,10 +83,26 @@ spec: done outputs: artifacts: - - name: artifact-creator + - name: local-artifact path: /mnt/vol/test.txt s3: - key: artifact-creator + key: local-artifact + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + archive: + none: {} + - name: global-artifact + globalName: global-artifact-global-name + path: /mnt/vol/globaltest.txt + s3: + key: global-artifact bucket: my-bucket-3 endpoint: minio:9000 insecure: true diff --git a/test/e2e/testdata/artifactgc/artgc-dag-wf-self-delete.yaml b/test/e2e/testdata/artifactgc/artgc-dag-wf-self-delete.yaml new file mode 100644 index 000000000000..0b216acef631 --- /dev/null +++ b/test/e2e/testdata/artifactgc/artgc-dag-wf-self-delete.yaml @@ -0,0 +1,99 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artgc-dag-wf-self-delete- +spec: + workflowMetadata: + labels: + workflows.argoproj.io/test: "true" + workflows.argoproj.io/workflow: "artgc-dag-wf-self-delete" + podGC: + strategy: OnPodCompletion + artifactGC: + serviceAccountName: default + strategy: OnWorkflowDeletion + entrypoint: artgc-dag-wf-self-delete-main + serviceAccountName: argo + executor: + serviceAccountName: default + volumeClaimTemplates: + - metadata: + name: artifacts + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Gi + templates: + - name: artgc-dag-wf-self-delete-main + dag: + tasks: + - name: create-artifact + template: artgc-dag-artifact-creator + - name: delay-delete-workflow + template: artgc-dag-delay-delete + dependencies: [create-artifact] + - name: delete-workflow + template: artgc-dag-workflow-deleter + dependencies: [delay-delete-workflow] + - name: artgc-dag-delay-delete + container: + image: alpine:latest + volumeMounts: + - name: artifacts + mountPath: /mnt/vol + command: [sh, -c] + args: + - | + echo "Delaying workflow delete" + ls /mnt + x=0 + while [ $x -le 60 ] + do + sleep 1 + if [ -f "/mnt/vol/test.txt" ]; then + echo "Artifacts found in shared volume" + break + fi + x=$(( $x + 1 )) + done + - name: artgc-dag-workflow-deleter + container: + image: argoproj/argocli:latest + args: + - delete + - -l + - workflows.argoproj.io/workflow=artgc-dag-wf-self-delete + - --namespace=argo + - --loglevel=debug + - name: artgc-dag-artifact-creator + metadata: + labels: + template: "artgc-dag-artifact-creator" + container: + image: alpine:latest + volumeMounts: + - name: artifacts + mountPath: /mnt/vol + command: [sh, -c] + args: + - | + echo 'testing' > /mnt/vol/test.txt + echo "Artifact saved to /mnt/vol/test.txt" + outputs: + artifacts: + - name: artifact + path: /mnt/vol/test.txt + s3: + key: artifact + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowDeletion \ No newline at end of file diff --git a/test/e2e/testdata/complex-global-artifact-passing.yaml b/test/e2e/testdata/complex-global-artifact-passing.yaml new file mode 100644 index 000000000000..effca2dcb8b5 --- /dev/null +++ b/test/e2e/testdata/complex-global-artifact-passing.yaml @@ -0,0 +1,205 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: global-artifact-passing- +spec: + entrypoint: test-root + templates: + - inputs: {} + metadata: {} + name: test-root + outputs: {} + steps: + - - arguments: {} + name: 
create-global-artifacts + template: create-global-artifacts + - - arguments: + artifacts: + - from: '{{workflow.outputs.artifacts.testInput}}' + name: testInput + - from: '{{workflow.outputs.artifacts.testUpdate}}' + name: testUpdate + name: nested-workflow-entrypoint + template: main + - - arguments: + artifacts: + - from: '{{workflow.outputs.artifacts.testUpload}}' + name: testUpload + name: upload-testupload-step + template: upload-testupload + - - arguments: + artifacts: + - from: '{{workflow.outputs.artifacts.testUpdate}}' + name: testUpdate + name: upload-testupdate-step + template: upload-testupdate + + - inputs: {} + metadata: {} + name: main + outputs: {} + steps: + - - arguments: + artifacts: + - from: '{{workflow.outputs.artifacts.testInput}}' + name: input + name: cp + template: cp + - - arguments: + artifacts: + - from: '{{workflow.outputs.artifacts.testUpdate}}' + name: input-parameters + name: generate-testupdate-update + template: generate-testupdate-update + - - arguments: + artifacts: + - from: '{{steps.cp.outputs.artifacts.upload}}' + name: testUpload + name: output-testupload + template: output-testupload + - - arguments: + artifacts: + - from: '{{steps.generate-testupdate-update.outputs.artifacts.updated-testupdate}}' + name: testUpdate + name: output-testupdate + template: output-testupdate + + + + - container: + image: alpine:3.7 + command: [sh, -c] + args: ["sleep 1; echo -n 'test input' > /testInput.txt; echo -n 'test update' > /testUpdate.txt"] + name: create-global-artifacts + outputs: + artifacts: + - globalName: testInput + name: testInput + path: /testInput.txt + archive: + none: {} + - globalName: testUpdate + name: testUpdate + path: /testUpdate.txt + archive: + none: {} + + + + - container: + command: [sh, -c] + args: ["sleep 1; cp /input.txt /upload.txt"] + image: alpine:3.7 + name: "" + resources: {} + inputs: + artifacts: + - name: input + path: /input.txt + metadata: {} + name: cp + outputs: + artifacts: + - name: upload + path: /upload.txt + + - container: + command: [sh, -c] + args: ["sleep 1; echo -n 'Updated testUpdate' > /updated-testUpdate.txt"] + image: alpine:3.18.4 + metadata: {} + name: generate-testupdate-update + outputs: + artifacts: + - name: updated-testupdate + path: /updated-testUpdate.txt + archive: + none: {} + + - container: + command: [sh, -c] + args: ["sleep 1"] + image: alpine:3.18.4 + name: "" + resources: {} + inputs: + artifacts: + - name: testUpload + path: /testUpload.txt + metadata: {} + name: output-testupload + outputs: + artifacts: + - globalName: testUpload + name: testUpload + path: /testUpload.txt + + - container: + image: alpine:3.18.4 + command: [sh, -c] + args: ["sleep 1"] + name: "" + resources: {} + inputs: + artifacts: + - name: testUpdate + path: /testUpdate.txt + metadata: {} + name: output-testupdate + outputs: + artifacts: + - globalName: testUpdate + name: testUpdate + path: /testUpdate.txt + + + + - container: + command: [sh, -c] + args: ["sleep 1; cat /upload/testUpload; cat /upload/testUpload.txt > /upload/testUpload.txt"] + image: alpine:3.18.4 + name: "" + resources: {} + inputs: + artifacts: + - name: testUpload + path: /upload/testUpload.txt + metadata: {} + name: upload-testupload + outputs: + artifacts: + - globalName: uploadresult + name: uploadresult + path: /upload/testUpload.txt + + - container: + command: [sh, -c] + args: ["sleep 1; cat /upload/testUpdate.txt"] + image: alpine:3.18.4 + name: "" + resources: {} + inputs: + artifacts: + - name: testUpdate + path: /upload/testUpdate.txt + 
metadata: {} + name: upload-testupdate + outputs: + artifacts: + - name: finalTestUpdate + path: /upload/testUpdate.txt + archive: + none: {} + s3: + key: finalTestUpdate + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowDeletion diff --git a/test/e2e/testdata/global-artifact-passing.yaml b/test/e2e/testdata/global-artifact-passing.yaml new file mode 100644 index 000000000000..35635c6cf102 --- /dev/null +++ b/test/e2e/testdata/global-artifact-passing.yaml @@ -0,0 +1,92 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: global-artifact-passing- +spec: + entrypoint: main + templates: + - name: main + outputs: {} + steps: + - - name: create-global-artifact + template: create-global-artifact + - - name: add-0 + template: add-to-global-artifact + arguments: + artifacts: + - name: input + from: '{{workflow.outputs.artifacts.globalArtifact}}' + parameters: + - name: value + value: '0' + - - name: add-1 + template: add-to-global-artifact + arguments: + artifacts: + - name: input + from: '{{workflow.outputs.artifacts.globalArtifact}}' + parameters: + - name: value + value: '1' + - - name: save-artifact + template: save-artifact + arguments: + artifacts: + - name: input + from: '{{workflow.outputs.artifacts.globalArtifact}}' + - name: create-global-artifact + container: + image: argoproj/argosay:v2 + command: [sh, -c] + args: ["touch /tmp/artifact.txt"] + outputs: + artifacts: + - globalName: globalArtifact + name: artifact + path: /tmp/artifact.txt + archive: + none: {} + - name: add-to-global-artifact + inputs: + parameters: + - name: value + artifacts: + - name: input + path: /tmp/artifact.txt + outputs: + artifacts: + - globalName: globalArtifact + name: artifact + path: /tmp/artifact.txt + archive: + none: {} + container: + image: argoproj/argosay:v2 + command: [sh, -c] + args: ["echo -n {{inputs.parameters.value}} >> /tmp/artifact.txt"] + - name: save-artifact + container: + image: argoproj/argosay:v2 + inputs: + artifacts: + - name: input + path: /tmp/artifact.txt + outputs: + artifacts: + - name: globalArtifact + path: /tmp/artifact.txt + archive: + none: {} + s3: + key: globalArtifact + bucket: my-bucket-3 + endpoint: minio:9000 + insecure: true + accessKeySecret: + name: my-minio-cred + key: accesskey + secretKeySecret: + name: my-minio-cred + key: secretkey + artifactGC: + strategy: OnWorkflowDeletion diff --git a/workflow/controller/taskresult.go b/workflow/controller/taskresult.go index 89448c7aab5a..7a8a014e841e 100644 --- a/workflow/controller/taskresult.go +++ b/workflow/controller/taskresult.go @@ -86,8 +86,6 @@ func (woc *wfOperationCtx) taskResultReconciliation() { if old.Outputs != nil && newNode.Outputs.ExitCode == nil { // prevent overwriting of ExitCode newNode.Outputs.ExitCode = old.Outputs.ExitCode } - // Add outputs to global scope here to ensure that they are reflected in archive. 
- woc.addOutputsToGlobalScope(newNode.Outputs) } if result.Progress.IsValid() { newNode.Progress = result.Progress From 33521350ebd287ca16c7c76df94bb9a492a4dff9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Pedro?= <39736905+Sule26@users.noreply.github.com> Date: Fri, 19 Jan 2024 15:48:32 -0300 Subject: [PATCH 16/38] fix: update minio chart repo (#12552) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: João Pedro <39736905+Sule26@users.noreply.github.com> Signed-off-by: Isitha Subasinghe --- docs/configure-artifact-repository.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configure-artifact-repository.md b/docs/configure-artifact-repository.md index 5cf1fe7eb5a2..8c4f227301f5 100644 --- a/docs/configure-artifact-repository.md +++ b/docs/configure-artifact-repository.md @@ -30,7 +30,7 @@ You can install MinIO into your cluster via Helm. First, [install `helm`](https://helm.sh/docs/intro/install/). Then, install MinIO with the below commands: ```bash -helm repo add minio https://helm.min.io/ # official minio Helm charts +helm repo add minio https://charts.min.io/ # official minio Helm charts helm repo update helm install argo-artifacts minio/minio --set service.type=LoadBalancer --set fullnameOverride=argo-artifacts ``` From 8c75a72a5b15ac39b5cddfed0886d3f76dcf9e3d Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Fri, 19 Jan 2024 20:29:19 +0800 Subject: [PATCH 17/38] fix: cache configmap don't create with workflow has retrystrategy. Fixes: #12490 #10426 (#12491) Signed-off-by: Isitha Subasinghe --- workflow/controller/exit_handler.go | 3 +- workflow/controller/hooks.go | 3 +- workflow/controller/operator.go | 22 ++- workflow/controller/operator_test.go | 209 +++++++++++++++++++++++++++ 4 files changed, 231 insertions(+), 6 deletions(-) diff --git a/workflow/controller/exit_handler.go b/workflow/controller/exit_handler.go index 238b10c2119e..a5b20d02506a 100644 --- a/workflow/controller/exit_handler.go +++ b/workflow/controller/exit_handler.go @@ -15,8 +15,7 @@ import ( func (woc *wfOperationCtx) runOnExitNode(ctx context.Context, exitHook *wfv1.LifecycleHook, parentNode *wfv1.NodeStatus, boundaryID string, tmplCtx *templateresolution.Context, prefix string, scope *wfScope) (bool, *wfv1.NodeStatus, error) { outputs := parentNode.Outputs - if parentNode.Type == wfv1.NodeTypeRetry { - lastChildNode := getChildNodeIndex(parentNode, woc.wf.Status.Nodes, -1) + if lastChildNode := woc.possiblyGetRetryChildNode(parentNode); lastChildNode != nil { outputs = lastChildNode.Outputs } diff --git a/workflow/controller/hooks.go b/workflow/controller/hooks.go index 750ab939e915..8e86d61c5393 100644 --- a/workflow/controller/hooks.go +++ b/workflow/controller/hooks.go @@ -75,8 +75,7 @@ func (woc *wfOperationCtx) executeTmplLifeCycleHook(ctx context.Context, scope * // executeTemplated should be invoked when hookedNode != nil, because we should reexecute the function to check mutex condition, etc. 
if execute || hookedNode != nil { outputs := parentNode.Outputs - if parentNode.Type == wfv1.NodeTypeRetry { - lastChildNode := getChildNodeIndex(parentNode, woc.wf.Status.Nodes, -1) + if lastChildNode := woc.possiblyGetRetryChildNode(parentNode); lastChildNode != nil { outputs = lastChildNode.Outputs } woc.log.WithField("lifeCycleHook", hookName).WithField("node", hookNodeName).WithField("hookName", hookName).Info("Running hooks") diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index 8113de7a6ca6..48b464af21e1 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -1732,6 +1732,14 @@ func (woc *wfOperationCtx) deletePVCs(ctx context.Context) error { return firstErr } +// Check if we have a retry node which wasn't memoized and return that if we do +func (woc *wfOperationCtx) possiblyGetRetryChildNode(node *wfv1.NodeStatus) *wfv1.NodeStatus { + if node.Type == wfv1.NodeTypeRetry && !(node.MemoizationStatus != nil && node.MemoizationStatus.Hit) { + return getChildNodeIndex(node, woc.wf.Status.Nodes, -1) + } + return nil +} + func getChildNodeIndex(node *wfv1.NodeStatus, nodes wfv1.Nodes, index int) *wfv1.NodeStatus { if len(node.Children) <= 0 { return nil @@ -2405,6 +2413,16 @@ func (woc *wfOperationCtx) initializeExecutableNode(nodeName string, nodeType wf node.Inputs = executeTmpl.Inputs.DeepCopy() } + // Set the MemoizationStatus + if node.MemoizationStatus == nil && executeTmpl.Memoize != nil { + memoizationStatus := &wfv1.MemoizationStatus{ + Hit: false, + Key: executeTmpl.Memoize.Key, + CacheName: executeTmpl.Memoize.Cache.ConfigMap.Name, + } + node.MemoizationStatus = memoizationStatus + } + if nodeType == wfv1.NodeTypeSuspend { node = addRawOutputFields(node, executeTmpl) } @@ -2974,8 +2992,8 @@ func (woc *wfOperationCtx) requeueIfTransientErr(err error, nodeName string) (*w func (woc *wfOperationCtx) buildLocalScope(scope *wfScope, prefix string, node *wfv1.NodeStatus) { // It may be that the node is a retry node, in which case we want to get the outputs of the last node // in the retry group instead of the retry node itself. 
- if node.Type == wfv1.NodeTypeRetry { - node = getChildNodeIndex(node, woc.wf.Status.Nodes, -1) + if lastChildNode := woc.possiblyGetRetryChildNode(node); lastChildNode != nil { + node = lastChildNode } if node.ID != "" { diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index d0f2aebbde63..85e91acdf638 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -5419,6 +5419,215 @@ func TestConfigMapCacheLoadOperateMaxAge(t *testing.T) { } } +var workflowStepCachedWithRetryStrategy = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: memoized-workflow-test +spec: + entrypoint: whalesay + arguments: + parameters: + - name: message + value: hi-there-world + templates: + - name: whalesay + inputs: + parameters: + - name: message + retryStrategy: + limit: "10" + memoize: + key: "{{inputs.parameters.message}}" + cache: + configMap: + name: whalesay-cache + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["sleep 10; cowsay {{inputs.parameters.message}} > /tmp/hello_world.txt"] + outputs: + parameters: + - name: hello + valueFrom: + path: /tmp/hello_world.txt +` + +var workflowDagCachedWithRetryStrategy = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: memoized-workflow-test +spec: + entrypoint: main +# podGC: +# strategy: OnPodCompletion + templates: + - name: main + dag: + tasks: + - name: regular-1 + template: run + arguments: + parameters: + - name: id + value: 1 + - name: cache-key + value: '{{workflow.name}}' + - name: regular-2 + template: run + depends: regular-1.Succeeded + arguments: + parameters: + - name: id + value: 2 + - name: cache-key + value: '{{workflow.name}}' + - name: with-retries-1 + template: run-with-retries + arguments: + parameters: + - name: id + value: 3 + - name: cache-key + value: '{{workflow.name}}' + - name: with-retries-2 + template: run-with-retries + depends: with-retries-1.Succeeded + arguments: + parameters: + - name: id + value: 4 + - name: cache-key + value: '{{workflow.name}}' + - name: with-dag-1 + template: run-with-dag + arguments: + parameters: + - name: id + value: 5 + - name: cache-key + value: '{{workflow.name}}' + - name: with-dag-2 + template: run-with-dag + depends: with-dag-1.Succeeded + arguments: + parameters: + - name: id + value: 6 + - name: cache-key + value: '{{workflow.name}}' + + - name: run + inputs: + parameters: + - name: id + - name: cache-key + script: + image: ubuntu:22.04 + command: [bash] + source: | + sleep 30 + echo result: {{inputs.parameters.id}} + memoize: + key: "regular-{{inputs.parameters.cache-key}}" + cache: + configMap: + name: memoization-test-cache + + - name: run-with-retries + inputs: + parameters: + - name: id + - name: cache-key + script: + image: ubuntu:22.04 + command: [bash] + source: | + sleep 30 + echo result: {{inputs.parameters.id}} + memoize: + key: "retry-{{inputs.parameters.cache-key}}" + cache: + configMap: + name: memoization-test-cache + retryStrategy: + limit: '1' + retryPolicy: Always + + - name: run-raw + inputs: + parameters: + - name: id + - name: cache-key + script: + image: ubuntu:22.04 + command: [bash] + source: | + sleep 30 + echo result: {{inputs.parameters.id}} + + - name: run-with-dag + inputs: + parameters: + - name: id + - name: cache-key + dag: + tasks: + - name: run-raw-step + template: run-raw + arguments: + parameters: + - name: id + value: '{{inputs.parameters.id}}' + - name: cache-key + value: '{{inputs.parameters.cache-key}}' + 
memoize: + key: "dag-{{inputs.parameters.cache-key}}" + cache: + configMap: + name: memoization-test-cache` + +func TestStepConfigMapCacheCreateWhenHaveRetryStrategy(t *testing.T) { + wf := wfv1.MustUnmarshalWorkflow(workflowStepCachedWithRetryStrategy) + cancel, controller := newController() + defer cancel() + + ctx := context.Background() + _, err := controller.wfclientset.ArgoprojV1alpha1().Workflows(wf.ObjectMeta.Namespace).Create(ctx, wf, metav1.CreateOptions{}) + assert.NoError(t, err) + + woc := newWorkflowOperationCtx(wf, controller) + woc.operate(ctx) + makePodsPhase(ctx, woc, apiv1.PodSucceeded) + woc.operate(ctx) + cm, err := controller.kubeclientset.CoreV1().ConfigMaps("default").Get(ctx, "whalesay-cache", metav1.GetOptions{}) + assert.NoError(t, err) + assert.Contains(t, cm.Labels, common.LabelKeyConfigMapType) + assert.Equal(t, common.LabelValueTypeConfigMapCache, cm.Labels[common.LabelKeyConfigMapType]) + assert.Equal(t, wfv1.WorkflowSucceeded, woc.wf.Status.Phase) +} + +func TestDAGConfigMapCacheCreateWhenHaveRetryStrategy(t *testing.T) { + wf := wfv1.MustUnmarshalWorkflow(workflowDagCachedWithRetryStrategy) + cancel, controller := newController() + defer cancel() + + ctx := context.Background() + _, err := controller.wfclientset.ArgoprojV1alpha1().Workflows(wf.ObjectMeta.Namespace).Create(ctx, wf, metav1.CreateOptions{}) + assert.NoError(t, err) + + woc := newWorkflowOperationCtx(wf, controller) + woc.operate(ctx) + makePodsPhase(ctx, woc, apiv1.PodSucceeded) + woc.operate(ctx) + cm, err := controller.kubeclientset.CoreV1().ConfigMaps("default").Get(ctx, "memoization-test-cache", metav1.GetOptions{}) + assert.NoError(t, err) + assert.Contains(t, cm.Labels, common.LabelKeyConfigMapType) + assert.Equal(t, common.LabelValueTypeConfigMapCache, cm.Labels[common.LabelKeyConfigMapType]) + assert.Equal(t, wfv1.WorkflowSucceeded, woc.wf.Status.Phase) +} + func TestConfigMapCacheLoadNoLabels(t *testing.T) { sampleConfigMapCacheEntry := apiv1.ConfigMap{ Data: map[string]string{ From 8e33da1a13ac6f8b09e45cac5ff39eab0927f498 Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Fri, 19 Jan 2024 00:37:36 +0800 Subject: [PATCH 18/38] fix: add resource quota evaluation timed out to transient (#12536) Signed-off-by: Isitha Subasinghe --- util/errors/errors.go | 6 +++++- util/errors/errors_test.go | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/util/errors/errors.go b/util/errors/errors.go index 66feb979b9f2..156db1d0308a 100644 --- a/util/errors/errors.go +++ b/util/errors/errors.go @@ -28,7 +28,7 @@ func IsTransientErr(err error) bool { return false } err = argoerrs.Cause(err) - isTransient := isExceededQuotaErr(err) || apierr.IsTooManyRequests(err) || isResourceQuotaConflictErr(err) || isTransientNetworkErr(err) || apierr.IsServerTimeout(err) || apierr.IsServiceUnavailable(err) || matchTransientErrPattern(err) || + isTransient := isExceededQuotaErr(err) || apierr.IsTooManyRequests(err) || isResourceQuotaConflictErr(err) || isResourceQuotaTimeoutErr(err) || isTransientNetworkErr(err) || apierr.IsServerTimeout(err) || apierr.IsServiceUnavailable(err) || matchTransientErrPattern(err) || errors.Is(err, NewErrTransient("")) if isTransient { log.Infof("Transient error: %v", err) @@ -57,6 +57,10 @@ func isResourceQuotaConflictErr(err error) bool { return apierr.IsConflict(err) && strings.Contains(err.Error(), "Operation cannot be fulfilled on resourcequota") } +func isResourceQuotaTimeoutErr(err error) bool { + return 
apierr.IsInternalError(err) && strings.Contains(err.Error(), "resource quota evaluation timed out") +} + func isTransientNetworkErr(err error) bool { switch err.(type) { case *net.DNSError, *net.OpError, net.UnknownNetworkError: diff --git a/util/errors/errors_test.go b/util/errors/errors_test.go index 5b044edb47ff..8374bc0941f9 100644 --- a/util/errors/errors_test.go +++ b/util/errors/errors_test.go @@ -57,6 +57,10 @@ func TestIsTransientErr(t *testing.T) { assert.False(t, IsTransientErr(apierr.NewConflict(schema.GroupResource{}, "", nil))) assert.True(t, IsTransientErr(apierr.NewConflict(schema.GroupResource{Group: "v1", Resource: "resourcequotas"}, "", nil))) }) + t.Run("ResourceQuotaTimeoutErr", func(t *testing.T) { + assert.False(t, IsTransientErr(apierr.NewInternalError(errors.New("")))) + assert.True(t, IsTransientErr(apierr.NewInternalError(errors.New("resource quota evaluation timed out")))) + }) t.Run("ExceededQuotaErr", func(t *testing.T) { assert.False(t, IsTransientErr(apierr.NewForbidden(schema.GroupResource{}, "", nil))) assert.True(t, IsTransientErr(apierr.NewForbidden(schema.GroupResource{Group: "v1", Resource: "pods"}, "", errors.New("exceeded quota")))) From 23b1a4b244e3e2ae1169854bf7f90ad60de2b62f Mon Sep 17 00:00:00 2001 From: Dennis Lawler <4824647+drawlerr@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:43:45 -0700 Subject: [PATCH 19/38] fix: prevent update race in workflow cache (Fixes #9574) (#12233) Signed-off-by: Dennis Lawler Signed-off-by: Dennis Lawler <4824647+drawlerr@users.noreply.github.com> Signed-off-by: Isitha Subasinghe --- workflow/controller/controller.go | 51 ++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index 9b4555faa209..6451b4e9ce12 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -649,13 +649,23 @@ func (wfc *WorkflowController) deleteOffloadedNodesForWorkflow(uid string, versi if !ok { return fmt.Errorf("object %+v is not an unstructured", workflows[0]) } + key := un.GetNamespace() + "/" + un.GetName() + wfc.workflowKeyLock.Lock(key) + defer wfc.workflowKeyLock.Unlock(key) + + obj, ok := wfc.getWorkflowByKey(key) + if !ok { + return fmt.Errorf("failed to get workflow by key after locking") + } + un, ok = obj.(*unstructured.Unstructured) + if !ok { + return fmt.Errorf("object %+v is not an unstructured", obj) + } wf, err = util.FromUnstructured(un) if err != nil { return err } - key := wf.ObjectMeta.Namespace + "/" + wf.ObjectMeta.Name - wfc.workflowKeyLock.Lock(key) - defer wfc.workflowKeyLock.Unlock(key) + // workflow might still be hydrated if wfc.hydrator.IsHydrated(wf) { log.WithField("uid", wf.UID).Info("Hydrated workflow encountered") @@ -729,20 +739,14 @@ func (wfc *WorkflowController) processNextItem(ctx context.Context) bool { } defer wfc.wfQueue.Done(key) - obj, exists, err := wfc.wfInformer.GetIndexer().GetByKey(key.(string)) - if err != nil { - log.WithFields(log.Fields{"key": key, "error": err}).Error("Failed to get workflow from informer") - return true - } - if !exists { - // This happens after a workflow was labeled with completed=true - // or was deleted, but the work queue still had an entry for it. 
-		return true
-	}

 	wfc.workflowKeyLock.Lock(key.(string))
 	defer wfc.workflowKeyLock.Unlock(key.(string))

+	obj, ok := wfc.getWorkflowByKey(key.(string))
+	if !ok {
+		return true
+	}
+
 	// The workflow informer receives unstructured objects to deal with the possibility of invalid
 	// workflow manifests that are unable to unmarshal to workflow objects
 	un, ok := obj.(*unstructured.Unstructured)
@@ -810,6 +814,20 @@
 	return true
 }
 
+func (wfc *WorkflowController) getWorkflowByKey(key string) (interface{}, bool) {
+	obj, exists, err := wfc.wfInformer.GetIndexer().GetByKey(key)
+	if err != nil {
+		log.WithFields(log.Fields{"key": key, "error": err}).Error("Failed to get workflow from informer")
+		return nil, false
+	}
+	if !exists {
+		// This happens after a workflow was labeled with completed=true
+		// or was deleted, but the work queue still had an entry for it.
+		return nil, false
+	}
+	return obj, true
+}
+
 func reconciliationNeeded(wf metav1.Object) bool {
 	return wf.GetLabels()[common.LabelKeyCompleted] != "true" || slices.Contains(wf.GetFinalizers(), common.FinalizerArtifactGC)
 }
@@ -1013,6 +1031,11 @@ func (wfc *WorkflowController) archiveWorkflow(ctx context.Context, obj interfac
 	}
 	wfc.workflowKeyLock.Lock(key)
 	defer wfc.workflowKeyLock.Unlock(key)
+	key, err = cache.MetaNamespaceKeyFunc(obj)
+	if err != nil {
+		log.Error("failed to get key for object after locking")
+		return
+	}
 	err = wfc.archiveWorkflowAux(ctx, obj)
 	if err != nil {
 		log.WithField("key", key).WithError(err).Error("failed to archive workflow")

From 16c4970e78c5f15ced290b7ae7d330e6c6252467 Mon Sep 17 00:00:00 2001
From: Ruin09
Date: Tue, 16 Jan 2024 15:36:19 +0900
Subject: [PATCH 20/38] fix: mutex with withSequence in http template broken.
Fixes #12018 (#12176) Signed-off-by: shmruin Signed-off-by: Isitha Subasinghe --- pkg/apis/workflow/v1alpha1/workflow_types.go | 2 ++ workflow/controller/http_template.go | 2 ++ 2 files changed, 4 insertions(+) diff --git a/pkg/apis/workflow/v1alpha1/workflow_types.go b/pkg/apis/workflow/v1alpha1/workflow_types.go index 5a7c3cf60536..3d29464e67fe 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types.go @@ -3015,6 +3015,8 @@ func (tmpl *Template) GetNodeType() NodeType { return NodeTypeSteps case TemplateTypeSuspend: return NodeTypeSuspend + case TemplateTypeHTTP: + return NodeTypeHTTP case TemplateTypePlugin: return NodeTypePlugin } diff --git a/workflow/controller/http_template.go b/workflow/controller/http_template.go index 0bbd9fcbfde2..de6a7048c1c6 100644 --- a/workflow/controller/http_template.go +++ b/workflow/controller/http_template.go @@ -8,6 +8,8 @@ func (woc *wfOperationCtx) executeHTTPTemplate(nodeName string, templateScope st node, err := woc.wf.GetNodeByName(nodeName) if err != nil { node = woc.initializeExecutableNode(nodeName, wfv1.NodeTypeHTTP, templateScope, tmpl, orgTmpl, opts.boundaryID, wfv1.NodePending, opts.nodeFlag) + } + if !node.Fulfilled() { woc.taskSet[node.ID] = *tmpl } return node From d4d28b5c7cfc7baf8c2180019bdaa3e9b04decc9 Mon Sep 17 00:00:00 2001 From: Son Bui Date: Tue, 16 Jan 2024 14:29:27 +0800 Subject: [PATCH 21/38] fix: SSO with Jumpcloud "email_verified" field #12257 (#12318) Signed-off-by: Son Bui Signed-off-by: Isitha Subasinghe --- server/auth/types/claims.go | 6 +++++- server/auth/types/claims_test.go | 22 ++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/server/auth/types/claims.go b/server/auth/types/claims.go index f4d7c8382cc0..677eef8d31be 100644 --- a/server/auth/types/claims.go +++ b/server/auth/types/claims.go @@ -12,7 +12,7 @@ type Claims struct { jwt.Claims Groups []string `json:"groups,omitempty"` Email string `json:"email,omitempty"` - EmailVerified bool `json:"email_verified,omitempty"` + EmailVerified bool `json:"-"` Name string `json:"name,omitempty"` ServiceAccountName string `json:"service_account_name,omitempty"` ServiceAccountNamespace string `json:"service_account_namespace,omitempty"` @@ -52,6 +52,10 @@ func (c *Claims) UnmarshalJSON(data []byte) error { return err } + if localClaim.RawClaim["email_verified"] == true || localClaim.RawClaim["email_verified"] == "true" { + localClaim.EmailVerified = true + } + *c = Claims(localClaim) return nil } diff --git a/server/auth/types/claims_test.go b/server/auth/types/claims_test.go index 047131e6ce01..1b87e8a4c336 100644 --- a/server/auth/types/claims_test.go +++ b/server/auth/types/claims_test.go @@ -134,6 +134,28 @@ func TestUnmarshalJSON(t *testing.T) { }, }, }, + { + description: "email verify field as string", + data: `{"email_verified":"true"}`, + expectedErr: nil, + expectedClaims: &Claims{ + RawClaim: map[string]interface{}{ + "email_verified": "true", + }, + EmailVerified: true, + }, + }, + { + description: "email verify field as bool", + data: `{"email_verified":true}`, + expectedErr: nil, + expectedClaims: &Claims{ + RawClaim: map[string]interface{}{ + "email_verified": true, + }, + EmailVerified: true, + }, + }, { description: "unmarshal no data", data: `{}`, From 02a3e2e399d90f59b4cb813aa41ad92aca045f03 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Sun, 11 Feb 2024 16:17:14 -0500 Subject: [PATCH 22/38] fix(build): check for env vars in 
all dirs (#12652) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe --- cmd/argo/commands/client/conn.go | 2 +- cmd/argo/commands/server.go | 16 +------ docs/cli/argo.md | 2 +- docs/cli/argo_archive.md | 2 +- docs/cli/argo_archive_delete.md | 2 +- docs/cli/argo_archive_get.md | 2 +- docs/cli/argo_archive_list-label-keys.md | 2 +- docs/cli/argo_archive_list-label-values.md | 2 +- docs/cli/argo_archive_list.md | 2 +- docs/cli/argo_archive_resubmit.md | 2 +- docs/cli/argo_archive_retry.md | 2 +- docs/cli/argo_auth.md | 2 +- docs/cli/argo_auth_token.md | 2 +- docs/cli/argo_cluster-template.md | 2 +- docs/cli/argo_cluster-template_create.md | 2 +- docs/cli/argo_cluster-template_delete.md | 2 +- docs/cli/argo_cluster-template_get.md | 2 +- docs/cli/argo_cluster-template_lint.md | 2 +- docs/cli/argo_cluster-template_list.md | 2 +- docs/cli/argo_completion.md | 2 +- docs/cli/argo_cp.md | 2 +- docs/cli/argo_cron.md | 2 +- docs/cli/argo_cron_create.md | 2 +- docs/cli/argo_cron_delete.md | 2 +- docs/cli/argo_cron_get.md | 2 +- docs/cli/argo_cron_lint.md | 2 +- docs/cli/argo_cron_list.md | 2 +- docs/cli/argo_cron_resume.md | 2 +- docs/cli/argo_cron_suspend.md | 2 +- docs/cli/argo_delete.md | 2 +- docs/cli/argo_executor-plugin.md | 2 +- docs/cli/argo_executor-plugin_build.md | 2 +- docs/cli/argo_get.md | 2 +- docs/cli/argo_lint.md | 2 +- docs/cli/argo_list.md | 2 +- docs/cli/argo_logs.md | 2 +- docs/cli/argo_node.md | 2 +- docs/cli/argo_resubmit.md | 2 +- docs/cli/argo_resume.md | 2 +- docs/cli/argo_retry.md | 2 +- docs/cli/argo_server.md | 4 +- docs/cli/argo_stop.md | 2 +- docs/cli/argo_submit.md | 2 +- docs/cli/argo_suspend.md | 2 +- docs/cli/argo_template.md | 2 +- docs/cli/argo_template_create.md | 2 +- docs/cli/argo_template_delete.md | 2 +- docs/cli/argo_template_get.md | 2 +- docs/cli/argo_template_lint.md | 2 +- docs/cli/argo_template_list.md | 2 +- docs/cli/argo_terminate.md | 2 +- docs/cli/argo_version.md | 2 +- docs/cli/argo_wait.md | 2 +- docs/cli/argo_watch.md | 2 +- docs/environment-variables.md | 53 +++++++++++++++++++- hack/check-env-doc.sh | 56 +++++++++++++--------- 56 files changed, 140 insertions(+), 93 deletions(-) diff --git a/cmd/argo/commands/client/conn.go b/cmd/argo/commands/client/conn.go index 8d92587bbcab..995c9ee76a57 100644 --- a/cmd/argo/commands/client/conn.go +++ b/cmd/argo/commands/client/conn.go @@ -45,7 +45,7 @@ func AddAPIClientFlagsToCmd(cmd *cobra.Command) { cmd.PersistentFlags().StringVar(&instanceID, "instanceid", os.Getenv("ARGO_INSTANCEID"), "submit with a specific controller's instance id label. Default to the ARGO_INSTANCEID environment variable.") // "-s" like kubectl cmd.PersistentFlags().StringVarP(&ArgoServerOpts.URL, "argo-server", "s", os.Getenv("ARGO_SERVER"), "API server `host:port`. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable.") - cmd.PersistentFlags().StringVar(&ArgoServerOpts.Path, "argo-base-href", os.Getenv("ARGO_BASE_HREF"), "An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable.") + cmd.PersistentFlags().StringVar(&ArgoServerOpts.Path, "argo-base-href", os.Getenv("ARGO_BASE_HREF"), "Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable.") cmd.PersistentFlags().BoolVar(&ArgoServerOpts.HTTP1, "argo-http1", os.Getenv("ARGO_HTTP1") == "true", "If true, use the HTTP client. 
Defaults to the ARGO_HTTP1 environment variable.") cmd.PersistentFlags().StringSliceVarP(&ArgoServerOpts.Headers, "header", "H", []string{}, "Sets additional header to all requests made by Argo CLI. (Can be repeated multiple times to add multiple headers, also supports comma separated headers) Used only when either ARGO_HTTP1 or --argo-http1 is set to true.") // "-e" for encrypted - like zip diff --git a/cmd/argo/commands/server.go b/cmd/argo/commands/server.go index 34f0bf661484..f38a3abae144 100644 --- a/cmd/argo/commands/server.go +++ b/cmd/argo/commands/server.go @@ -4,10 +4,8 @@ import ( "context" "crypto/tls" "fmt" - "net/http" "os" "reflect" - "strconv" "strings" "time" @@ -188,18 +186,6 @@ See %s`, help.ArgoServer), return err } - // disabled by default, for security - if x, enabled := os.LookupEnv("ARGO_SERVER_PPROF"); enabled { - port, err := strconv.Atoi(x) - if err != nil { - return err - } - go func() { - log.Infof("starting server for pprof on :%d, see https://golang.org/pkg/net/http/pprof/", port) - log.Println(http.ListenAndServe(fmt.Sprintf(":%d", port), nil)) - }() - } - server.Run(ctx, port, browserOpenFunc) return nil }, @@ -232,7 +218,7 @@ See %s`, help.ArgoServer), command.Flags().StringVar(&frameOptions, "x-frame-options", "DENY", "Set X-Frame-Options header in HTTP responses.") command.Flags().StringVar(&accessControlAllowOrigin, "access-control-allow-origin", "", "Set Access-Control-Allow-Origin header in HTTP responses.") command.Flags().Uint64Var(&apiRateLimit, "api-rate-limit", 1000, "Set limit per IP for api ratelimiter") - command.Flags().StringArrayVar(&allowedLinkProtocol, "allowed-link-protocol", defaultAllowedLinkProtocol, "Allowed link protocol in configMap. Used if the allowed configMap links protocol are different from http,https. Defaults to the environment variable ALLOWED_LINK_PROTOCOL") + command.Flags().StringArrayVar(&allowedLinkProtocol, "allowed-link-protocol", defaultAllowedLinkProtocol, "Allowed protocols for links feature. Defaults to the environment variable ALLOWED_LINK_PROTOCOL: http,https") command.Flags().StringVar(&logFormat, "log-format", "text", "The formatter to use for logs. One of: text|json") command.Flags().Float32Var(&kubeAPIQPS, "kube-api-qps", 20.0, "QPS to use while talking with kube-apiserver.") command.Flags().IntVar(&kubeAPIBurst, "kube-api-burst", 30, "Burst to use while talking with kube-apiserver.") diff --git a/docs/cli/argo.md b/docs/cli/argo.md index 9723e6af8d33..69af54e3e99f 100644 --- a/docs/cli/argo.md +++ b/docs/cli/argo.md @@ -69,7 +69,7 @@ argo [flags] ### Options ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive.md b/docs/cli/argo_archive.md index 2375b7ff3a3f..ee5ab574fea1 100644 --- a/docs/cli/argo_archive.md +++ b/docs/cli/argo_archive.md @@ -15,7 +15,7 @@ argo archive [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. 
+ --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_delete.md b/docs/cli/argo_archive_delete.md index 8381976b866a..ca56e7a932ec 100644 --- a/docs/cli/argo_archive_delete.md +++ b/docs/cli/argo_archive_delete.md @@ -15,7 +15,7 @@ argo archive delete UID... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_get.md b/docs/cli/argo_archive_get.md index 9568d375eca8..9f767f5be44a 100644 --- a/docs/cli/argo_archive_get.md +++ b/docs/cli/argo_archive_get.md @@ -16,7 +16,7 @@ argo archive get UID [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_list-label-keys.md b/docs/cli/argo_archive_list-label-keys.md index f46da1a97622..e5edfb28efc1 100644 --- a/docs/cli/argo_archive_list-label-keys.md +++ b/docs/cli/argo_archive_list-label-keys.md @@ -15,7 +15,7 @@ argo archive list-label-keys [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_list-label-values.md b/docs/cli/argo_archive_list-label-values.md index e41582333515..3251f3797f15 100644 --- a/docs/cli/argo_archive_list-label-values.md +++ b/docs/cli/argo_archive_list-label-values.md @@ -16,7 +16,7 @@ argo archive list-label-values [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. 
Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_list.md b/docs/cli/argo_archive_list.md index f02e35109ff7..6bbf283d335f 100644 --- a/docs/cli/argo_archive_list.md +++ b/docs/cli/argo_archive_list.md @@ -18,7 +18,7 @@ argo archive list [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_resubmit.md b/docs/cli/argo_archive_resubmit.md index 0ce78536b3c9..4fb78fe85e74 100644 --- a/docs/cli/argo_archive_resubmit.md +++ b/docs/cli/argo_archive_resubmit.md @@ -57,7 +57,7 @@ argo archive resubmit [WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_archive_retry.md b/docs/cli/argo_archive_retry.md index 710bdbbbda9f..1dbc9c7a1f65 100644 --- a/docs/cli/argo_archive_retry.md +++ b/docs/cli/argo_archive_retry.md @@ -57,7 +57,7 @@ argo archive retry [WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_auth.md b/docs/cli/argo_auth.md index e7ca7f211418..5b5e9b60a740 100644 --- a/docs/cli/argo_auth.md +++ b/docs/cli/argo_auth.md @@ -15,7 +15,7 @@ argo auth [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_auth_token.md b/docs/cli/argo_auth_token.md index 28c320441e6c..ec0c33901833 100644 --- a/docs/cli/argo_auth_token.md +++ b/docs/cli/argo_auth_token.md @@ -15,7 +15,7 @@ argo auth token [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template.md b/docs/cli/argo_cluster-template.md index 5c6c8a5405c9..9419675090d6 100644 --- a/docs/cli/argo_cluster-template.md +++ b/docs/cli/argo_cluster-template.md @@ -15,7 +15,7 @@ argo cluster-template [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template_create.md b/docs/cli/argo_cluster-template_create.md index 4b8d7a40d89b..9672e14a7f06 100644 --- a/docs/cli/argo_cluster-template_create.md +++ b/docs/cli/argo_cluster-template_create.md @@ -17,7 +17,7 @@ argo cluster-template create FILE1 FILE2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template_delete.md b/docs/cli/argo_cluster-template_delete.md index 99c8c2f9c5bc..140aee41f2b0 100644 --- a/docs/cli/argo_cluster-template_delete.md +++ b/docs/cli/argo_cluster-template_delete.md @@ -16,7 +16,7 @@ argo cluster-template delete WORKFLOW_TEMPLATE [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template_get.md b/docs/cli/argo_cluster-template_get.md index 89c297589cb9..5ed1f7767a59 100644 --- a/docs/cli/argo_cluster-template_get.md +++ b/docs/cli/argo_cluster-template_get.md @@ -16,7 +16,7 @@ argo cluster-template get CLUSTER WORKFLOW_TEMPLATE... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template_lint.md b/docs/cli/argo_cluster-template_lint.md index 49b6a79ad9c5..0c5b2947283e 100644 --- a/docs/cli/argo_cluster-template_lint.md +++ b/docs/cli/argo_cluster-template_lint.md @@ -17,7 +17,7 @@ argo cluster-template lint FILE... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cluster-template_list.md b/docs/cli/argo_cluster-template_list.md index 9526874ab320..0a9ea4147fb2 100644 --- a/docs/cli/argo_cluster-template_list.md +++ b/docs/cli/argo_cluster-template_list.md @@ -16,7 +16,7 @@ argo cluster-template list [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_completion.md b/docs/cli/argo_completion.md index 136e4abdb49e..4470f00ca393 100644 --- a/docs/cli/argo_completion.md +++ b/docs/cli/argo_completion.md @@ -28,7 +28,7 @@ argo completion SHELL [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_cp.md b/docs/cli/argo_cp.md index 4deabf3cd0e9..d3a5305fd942 100644 --- a/docs/cli/argo_cp.md +++ b/docs/cli/argo_cp.md @@ -33,7 +33,7 @@ argo cp my-wf output-directory ... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron.md b/docs/cli/argo_cron.md index b4807f0382da..880b011562b0 100644 --- a/docs/cli/argo_cron.md +++ b/docs/cli/argo_cron.md @@ -19,7 +19,7 @@ argo cron [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_create.md b/docs/cli/argo_cron_create.md index 4964e04ef43b..024440fbe698 100644 --- a/docs/cli/argo_cron_create.md +++ b/docs/cli/argo_cron_create.md @@ -25,7 +25,7 @@ argo cron create FILE1 FILE2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_delete.md b/docs/cli/argo_cron_delete.md index 14352346708f..cecfb07b81ef 100644 --- a/docs/cli/argo_cron_delete.md +++ b/docs/cli/argo_cron_delete.md @@ -16,7 +16,7 @@ argo cron delete [CRON_WORKFLOW... | --all] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_get.md b/docs/cli/argo_cron_get.md index 73badb4f7af9..6c4516a7ddfd 100644 --- a/docs/cli/argo_cron_get.md +++ b/docs/cli/argo_cron_get.md @@ -16,7 +16,7 @@ argo cron get CRON_WORKFLOW... 
[flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_lint.md b/docs/cli/argo_cron_lint.md index 85768db3cc27..311007b60102 100644 --- a/docs/cli/argo_cron_lint.md +++ b/docs/cli/argo_cron_lint.md @@ -17,7 +17,7 @@ argo cron lint FILE... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_list.md b/docs/cli/argo_cron_list.md index 7a89024cb911..e719b8246044 100644 --- a/docs/cli/argo_cron_list.md +++ b/docs/cli/argo_cron_list.md @@ -18,7 +18,7 @@ argo cron list [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_resume.md b/docs/cli/argo_cron_resume.md index d02698edef36..6b02799cad5a 100644 --- a/docs/cli/argo_cron_resume.md +++ b/docs/cli/argo_cron_resume.md @@ -15,7 +15,7 @@ argo cron resume [CRON_WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_cron_suspend.md b/docs/cli/argo_cron_suspend.md index 78ccaf259cf5..3f592bbfb98e 100644 --- a/docs/cli/argo_cron_suspend.md +++ b/docs/cli/argo_cron_suspend.md @@ -15,7 +15,7 @@ argo cron suspend CRON_WORKFLOW... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. 
--argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_delete.md b/docs/cli/argo_delete.md index 0f71b297a920..8ae752e1b568 100644 --- a/docs/cli/argo_delete.md +++ b/docs/cli/argo_delete.md @@ -40,7 +40,7 @@ argo delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmit ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_executor-plugin.md b/docs/cli/argo_executor-plugin.md index df57b63e03cc..63461a85976d 100644 --- a/docs/cli/argo_executor-plugin.md +++ b/docs/cli/argo_executor-plugin.md @@ -15,7 +15,7 @@ argo executor-plugin [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_executor-plugin_build.md b/docs/cli/argo_executor-plugin_build.md index 6134508d21ac..27e4216db55a 100644 --- a/docs/cli/argo_executor-plugin_build.md +++ b/docs/cli/argo_executor-plugin_build.md @@ -15,7 +15,7 @@ argo executor-plugin build DIR [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_get.md b/docs/cli/argo_get.md index 72fc870492d3..8509a6a3b385 100644 --- a/docs/cli/argo_get.md +++ b/docs/cli/argo_get.md @@ -32,7 +32,7 @@ argo get WORKFLOW... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_lint.md b/docs/cli/argo_lint.md index 426d8b928710..2a4cf53aad61 100644 --- a/docs/cli/argo_lint.md +++ b/docs/cli/argo_lint.md @@ -32,7 +32,7 @@ argo lint FILE... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_list.md b/docs/cli/argo_list.md index d8d619c3b182..f9497332fd44 100644 --- a/docs/cli/argo_list.md +++ b/docs/cli/argo_list.md @@ -28,7 +28,7 @@ argo list [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_logs.md b/docs/cli/argo_logs.md index fc9f9ffb8a64..530b9d8d7202 100644 --- a/docs/cli/argo_logs.md +++ b/docs/cli/argo_logs.md @@ -57,7 +57,7 @@ argo logs WORKFLOW [POD] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_node.md b/docs/cli/argo_node.md index d291e1eccf19..df7120ef3bcb 100644 --- a/docs/cli/argo_node.md +++ b/docs/cli/argo_node.md @@ -32,7 +32,7 @@ argo node ACTION WORKFLOW FLAGS [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_resubmit.md b/docs/cli/argo_resubmit.md index 0aa383f8be8e..1173a2c520c7 100644 --- a/docs/cli/argo_resubmit.md +++ b/docs/cli/argo_resubmit.md @@ -65,7 +65,7 @@ argo resubmit [WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). 
Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_resume.md b/docs/cli/argo_resume.md index 8cb905ad3958..74e01d5de1cd 100644 --- a/docs/cli/argo_resume.md +++ b/docs/cli/argo_resume.md @@ -37,7 +37,7 @@ argo resume WORKFLOW1 WORKFLOW2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_retry.md b/docs/cli/argo_retry.md index 5007f620e1d7..54c9266cb3af 100644 --- a/docs/cli/argo_retry.md +++ b/docs/cli/argo_retry.md @@ -68,7 +68,7 @@ argo retry [WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_server.md b/docs/cli/argo_server.md index f1edcc154c7b..bb1d216e4269 100644 --- a/docs/cli/argo_server.md +++ b/docs/cli/argo_server.md @@ -17,7 +17,7 @@ See https://argo-workflows.readthedocs.io/en/release-3.5/argo-server/ ``` --access-control-allow-origin string Set Access-Control-Allow-Origin header in HTTP responses. - --allowed-link-protocol stringArray Allowed link protocol in configMap. Used if the allowed configMap links protocol are different from http,https. Defaults to the environment variable ALLOWED_LINK_PROTOCOL (default [http,https]) + --allowed-link-protocol stringArray Allowed protocols for links feature. Defaults to the environment variable ALLOWED_LINK_PROTOCOL: http,https (default [http,https]) --api-rate-limit uint Set limit per IP for api ratelimiter (default 1000) --auth-mode stringArray API server authentication mode. Any 1 or more length permutation of: client,server,sso (default [client]) --basehref string Value for base href in index.html. Used if the server is running behind reverse proxy under subpath different from /. Defaults to the environment variable BASE_HREF. (default "/") @@ -42,7 +42,7 @@ See https://argo-workflows.readthedocs.io/en/release-3.5/argo-server/ ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. 
Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_stop.md b/docs/cli/argo_stop.md index 8475e44af78a..5dbcc31a6747 100644 --- a/docs/cli/argo_stop.md +++ b/docs/cli/argo_stop.md @@ -45,7 +45,7 @@ argo stop WORKFLOW WORKFLOW2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_submit.md b/docs/cli/argo_submit.md index 823c427efa6b..3c1aa1cdbacd 100644 --- a/docs/cli/argo_submit.md +++ b/docs/cli/argo_submit.md @@ -59,7 +59,7 @@ argo submit [FILE... | --from `kind/name] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_suspend.md b/docs/cli/argo_suspend.md index bb06db3a0f54..6032872f78ba 100644 --- a/docs/cli/argo_suspend.md +++ b/docs/cli/argo_suspend.md @@ -27,7 +27,7 @@ argo suspend WORKFLOW1 WORKFLOW2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_template.md b/docs/cli/argo_template.md index bbfc6a438c8d..7c1f6629bd53 100644 --- a/docs/cli/argo_template.md +++ b/docs/cli/argo_template.md @@ -15,7 +15,7 @@ argo template [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_template_create.md b/docs/cli/argo_template_create.md index 953f408f3be8..45d64999f571 100644 --- a/docs/cli/argo_template_create.md +++ b/docs/cli/argo_template_create.md @@ -17,7 +17,7 @@ argo template create FILE1 FILE2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_template_delete.md b/docs/cli/argo_template_delete.md index 74c8077b4494..4ff7ade0d8de 100644 --- a/docs/cli/argo_template_delete.md +++ b/docs/cli/argo_template_delete.md @@ -16,7 +16,7 @@ argo template delete WORKFLOW_TEMPLATE [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_template_get.md b/docs/cli/argo_template_get.md index ee0f42f4751c..09a23f7c1e31 100644 --- a/docs/cli/argo_template_get.md +++ b/docs/cli/argo_template_get.md @@ -16,7 +16,7 @@ argo template get WORKFLOW_TEMPLATE... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_template_lint.md b/docs/cli/argo_template_lint.md index 65ac604d8a77..f59c2a26bd34 100644 --- a/docs/cli/argo_template_lint.md +++ b/docs/cli/argo_template_lint.md @@ -17,7 +17,7 @@ argo template lint (DIRECTORY | FILE1 FILE2 FILE3...) [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_template_list.md b/docs/cli/argo_template_list.md index a1dbf6aa5397..5bc5940a4aed 100644 --- a/docs/cli/argo_template_list.md +++ b/docs/cli/argo_template_list.md @@ -17,7 +17,7 @@ argo template list [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_terminate.md b/docs/cli/argo_terminate.md index 9f3530709571..c7dcfa168ff3 100644 --- a/docs/cli/argo_terminate.md +++ b/docs/cli/argo_terminate.md @@ -43,7 +43,7 @@ argo terminate WORKFLOW WORKFLOW2... [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_version.md b/docs/cli/argo_version.md index a780cc7483d6..0651ec3eaf63 100644 --- a/docs/cli/argo_version.md +++ b/docs/cli/argo_version.md @@ -16,7 +16,7 @@ argo version [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/cli/argo_wait.md b/docs/cli/argo_wait.md index 76d5d459afc6..040406eafb59 100644 --- a/docs/cli/argo_wait.md +++ b/docs/cli/argo_wait.md @@ -29,7 +29,7 @@ argo wait [WORKFLOW...] [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. 
--as string Username to impersonate for the operation diff --git a/docs/cli/argo_watch.md b/docs/cli/argo_watch.md index 3ff9c999a5c6..842fc024c66c 100644 --- a/docs/cli/argo_watch.md +++ b/docs/cli/argo_watch.md @@ -30,7 +30,7 @@ argo watch WORKFLOW [flags] ### Options inherited from parent commands ``` - --argo-base-href string An path to use with HTTP client (e.g. due to BASE_HREF). Defaults to the ARGO_BASE_HREF environment variable. + --argo-base-href string Path to use with HTTP client due to BASE_HREF. Defaults to the ARGO_BASE_HREF environment variable. --argo-http1 If true, use the HTTP client. Defaults to the ARGO_HTTP1 environment variable. -s, --argo-server host:port API server host:port. e.g. localhost:2746. Defaults to the ARGO_SERVER environment variable. --as string Username to impersonate for the operation diff --git a/docs/environment-variables.md b/docs/environment-variables.md index efe9a030675e..6192ffc38f45 100644 --- a/docs/environment-variables.md +++ b/docs/environment-variables.md @@ -14,7 +14,7 @@ most users. Environment variables may be removed at any time. | `ALL_POD_CHANGES_SIGNIFICANT` | `bool` | `false` | Whether to consider all pod changes as significant during pod reconciliation. | | `ALWAYS_OFFLOAD_NODE_STATUS` | `bool` | `false` | Whether to always offload the node status. | | `ARCHIVED_WORKFLOW_GC_PERIOD` | `time.Duration` | `24h` | The periodicity for GC of archived workflows. | -| `ARGO_PPROF` | `bool` | `false` | Enable `pprof` endpoints | +| `ARGO_PPROF` | `bool` | `false` | Enable [`pprof`](https://go.dev/blog/pprof) endpoints | | `ARGO_PROGRESS_PATCH_TICK_DURATION` | `time.Duration` | `1m` | How often self reported progress is patched into the pod annotations which means how long it takes until the controller picks up the progress change. Set to 0 to disable self reporting progress. | | `ARGO_PROGRESS_FILE_TICK_DURATION` | `time.Duration` | `3s` | How often the progress file is read by the executor. Set to 0 to disable self reporting progress. | | `ARGO_REMOVE_PVC_PROTECTION_FINALIZER` | `bool` | `true` | Remove the `kubernetes.io/pvc-protection` finalizer from persistent volume claims (PVC) after marking PVCs created for the workflow for deletion, so deleted is not blocked until the pods are deleted. [#6629](https://github.com/argoproj/argo-workflows/issues/6629) | @@ -123,6 +123,8 @@ spec: | Name | Type | Default | Description | |----------------------------------------|-----------------|---------|--------------------------------------------------------------------------------------------------------| +| `ARGO_DEBUG_PAUSE_AFTER` | `bool` | `false` | Enable [Debug Pause](debug-pause.md) after step execution +| `ARGO_DEBUG_PAUSE_BEFORE` | `bool` | `false` | Enable [Debug Pause](debug-pause.md) before step execution | `EXECUTOR_RETRY_BACKOFF_DURATION` | `time.Duration` | `1s` | The retry back-off duration when the workflow executor performs retries. | | `EXECUTOR_RETRY_BACKOFF_FACTOR` | `float` | `1.6` | The retry back-off factor when the workflow executor performs retries. | | `EXECUTOR_RETRY_BACKOFF_JITTER` | `float` | `0.5` | The retry back-off jitter when the workflow executor performs retries. 
| @@ -151,8 +153,57 @@ data: | Name | Type | Default | Description | |--------------------------------------------|----------|---------|-------------------------------------------------------------------------------------------------------------------------| +| `ALLOWED_LINK_PROTOCOL` | `string` | `http,https` | List of comma separated protocols allowed for the [Links feature](links.md) +| `ARGO_ARTIFACT_SERVER` | `bool` | `true` | Enable [Workflow Archive](workflow-archive.md) endpoints +| `ARGO_PPROF` | `bool` | `false` | Enable [`pprof`](https://go.dev/blog/pprof) endpoints +| `ARGO_SERVER_METRICS_AUTH` | `bool` | `true` | Enable auth on the `/metrics` endpoint +| `BASE_HREF` | `string` | `/` | [Base HREF](argo-server.md#base-href) of the Server | `DISABLE_VALUE_LIST_RETRIEVAL_KEY_PATTERN` | `string` | `""` | Disable the retrieval of the list of label values for keys based on this regular expression. | | `FIRST_TIME_USER_MODAL` | `bool` | `true` | Show this modal. | | `FEEDBACK_MODAL` | `bool` | `true` | Show this modal. | +| `GRPC_MESSAGE_SIZE` | `string` | `104857600` | Use different GRPC Max message size for Server (supporting huge workflows). | +| `IP_KEY_FUNC_HEADERS` | `string` | `""` | List of comma separated request headers containing IPs to use for rate limiting. For example, "X-Forwarded-For,X-Real-IP". By default, uses the request's remote IP address. | | `NEW_VERSION_MODAL` | `bool` | `true` | Show this modal. | | `POD_NAMES` | `string` | `v2` | Whether to have pod names contain the template name (v2) or be the node id (v1) - should be set the same for Controller | +| `SSO_DELEGATE_RBAC_TO_NAMESPACE` | `bool` | `false` | Enable [SSO RBAC Namespace Delegation](argo-server-sso.md#sso-rbac-namespace-delegation) + +CLI parameters of the Server can be specified as environment variables with the `ARGO_` prefix. +For example: + +```bash +argo server --managed-namespace=argo +``` + +Can be expressed as: + +```bash +ARGO_MANAGED_NAMESPACE=argo argo server +``` + +You can set environment variables for the Server Deployment's container spec like the following: + +```yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: argo-server +spec: + selector: + matchLabels: + app: argo-server + template: + metadata: + labels: + app: argo-server + spec: + containers: + - args: + - server + image: argoproj/argocli:latest + name: argo-server + env: + - name: GRPC_MESSAGE_SIZE + value: "209715200" + ports: + # ... +``` diff --git a/hack/check-env-doc.sh b/hack/check-env-doc.sh index bbb7b8fa1335..88738de349ef 100755 --- a/hack/check-env-doc.sh +++ b/hack/check-env-doc.sh @@ -2,32 +2,42 @@ echo "Checking docs/environment-variables.md for completeness..." +# Directories to check. For cmd/, only check Controller, Executor, and Server. The CLI has generated docs +dirs=(./workflow ./persist ./util ./server ./cmd/argo/commands/server.go ./cmd/argoexec ./cmd/workflow-controller) +not_found="false" + function check-used { - grep "| \`" < ./docs/environment-variables.md \ - | awk '{gsub(/\`/, "", $2); print $2; }' \ - | while read -r x; do - var="${x%\`}"; - var="${var#\`}"; - if ! grep -qR --exclude="*_test.go" "$var" ./cmd/workflow-controller ./workflow ./persist ./util ./server ; then - echo "❌ Documented variable $var in docs/environment-variables.md is not used anywhere" >&2; - exit 1; - fi; - done + mapfile -t check < <(grep "| \`" < ./docs/environment-variables.md \ + | awk '{gsub(/\`/, "", $2); print $2; }') + + for x in "${check[@]}"; do + var="${x%\`}"; + var="${var#\`}"; + if ! 
grep -qR --exclude="*_test.go" "$var" "${dirs[@]}" ; then + echo "❌ Documented variable $var in docs/environment-variables.md is not used anywhere" >&2; + not_found="true"; + fi + done } function check-documented { - grep -REh --exclude="*_test.go" "Getenv.*?\(|LookupEnv.*?\(" ./workflow ./persist ./util \ - | grep -Eo "\"[A-Z_]+?\"" \ - | sort \ - | uniq \ - | while read -r x; do - var="${x%\"}"; - var="${var#\"}"; - if ! grep -q "$var" docs/environment-variables.md; then - echo "❌ Variable $var not documented in docs/environment-variables.md" >&2; - exit 1; - fi; - done + mapfile -t check < <(grep -REh --exclude="*_test.go" "Getenv.*?\(|LookupEnv.*?\(|env.Get*?\(" "${dirs[@]}" \ + | grep -Eo "\"[A-Z_]+?\"" \ + | sort \ + | uniq) + + for x in "${check[@]}"; do + var="${x%\"}"; + var="${var#\"}"; + if ! grep -q "$var" docs/environment-variables.md; then + echo "❌ Variable $var not documented in docs/environment-variables.md" >&2; + not_found="true"; + fi + done } -check-used && check-documented && echo "✅ Success - all environment variables appear to be documented" +check-used && check-documented; +if [[ "$not_found" == "true" ]]; then + exit 1; +fi +echo "✅ Success - all environment variables appear to be documented" From a5bf99690c8b8189c439f2775685108e84a9cd02 Mon Sep 17 00:00:00 2001 From: Alan Clucas Date: Wed, 24 Jan 2024 14:55:46 +0000 Subject: [PATCH 23/38] fix: make etcd errors transient (#12567) Signed-off-by: Alan Clucas Signed-off-by: Isitha Subasinghe --- util/errors/errors.go | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/util/errors/errors.go b/util/errors/errors.go index 156db1d0308a..496176070128 100644 --- a/util/errors/errors.go +++ b/util/errors/errors.go @@ -28,7 +28,15 @@ func IsTransientErr(err error) bool { return false } err = argoerrs.Cause(err) - isTransient := isExceededQuotaErr(err) || apierr.IsTooManyRequests(err) || isResourceQuotaConflictErr(err) || isResourceQuotaTimeoutErr(err) || isTransientNetworkErr(err) || apierr.IsServerTimeout(err) || apierr.IsServiceUnavailable(err) || matchTransientErrPattern(err) || + isTransient := isExceededQuotaErr(err) || + apierr.IsTooManyRequests(err) || + isResourceQuotaConflictErr(err) || + isResourceQuotaTimeoutErr(err) || + isTransientNetworkErr(err) || + apierr.IsServerTimeout(err) || + apierr.IsServiceUnavailable(err) || + isTransientEtcdErr(err) || + matchTransientErrPattern(err) || errors.Is(err, NewErrTransient("")) if isTransient { log.Infof("Transient error: %v", err) @@ -61,6 +69,16 @@ func isResourceQuotaTimeoutErr(err error) bool { return apierr.IsInternalError(err) && strings.Contains(err.Error(), "resource quota evaluation timed out") } +func isTransientEtcdErr(err error) bool { + // Some clusters expose these (transient) etcd errors to the caller + if strings.Contains(err.Error(), "etcdserver: leader changed") { + return true + } else if strings.Contains(err.Error(), "etcdserver: request timed out") { + return true + } + return false +} + func isTransientNetworkErr(err error) bool { switch err.(type) { case *net.DNSError, *net.OpError, net.UnknownNetworkError: From 901cfb63632903b59b0f6858e813b85a104cb486 Mon Sep 17 00:00:00 2001 From: Paolo Quadri Date: Thu, 8 Feb 2024 23:40:27 +0100 Subject: [PATCH 24/38] fix: controller option to not watch configmap (#12622) Signed-off-by: Paolo Quadri Signed-off-by: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Co-authored-by: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Signed-off-by: Isitha Subasinghe --- 
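For reference, a minimal sketch (not part of this patch) of how the opt-out described above could be applied to the workflow-controller Deployment, assuming the `WATCH_CONTROLLER_SEMAPHORE_CONFIGMAPS` variable introduced in the diff below; the container name here is illustrative:

```yaml
# Illustrative sketch only: disable run-time watching of the Controller's
# ConfigMap and semaphore ConfigMaps. With this set, the Controller reads the
# ConfigMaps once at startup and must be restarted to pick up changes.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: workflow-controller
spec:
  template:
    spec:
      containers:
        - name: workflow-controller   # container name assumed
          env:
            - name: WATCH_CONTROLLER_SEMAPHORE_CONFIGMAPS
              value: "false"
```
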
.spelling | 8 ++++++++ docs/environment-variables.md | 1 + workflow/controller/controller.go | 6 +++++- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.spelling b/.spelling index 146c77176172..ae9c1ac6b8bc 100644 --- a/.spelling +++ b/.spelling @@ -41,6 +41,14 @@ ClusterRoleBinding CRD CRDs CloudSQL +ClusterRoleBinding +ClusterRoles +Codespaces +ConfigMap +ConfigMaps +Couler +CronWorkflow +CronWorkflows DataDog Dataflow DeleteObject diff --git a/docs/environment-variables.md b/docs/environment-variables.md index 6192ffc38f45..7c10ec5de0d5 100644 --- a/docs/environment-variables.md +++ b/docs/environment-variables.md @@ -53,6 +53,7 @@ most users. Environment variables may be removed at any time. | `WF_DEL_PROPAGATION_POLICY` | `string` | `""` | The deletion propagation policy for workflows. | | `WORKFLOW_GC_PERIOD` | `time.Duration` | `5m` | The periodicity for GC of workflows. | | `SEMAPHORE_NOTIFY_DELAY` | `time.Duration` | `1s` | Tuning Delay when notifying semaphore waiters about availability in the semaphore | +| `WATCH_CONTROLLER_SEMAPHORE_CONFIGMAPS` | `bool` | `true` | Whether to watch the Controller's ConfigMap and semaphore ConfigMaps for run-time changes. When disabled, the Controller will only read these ConfigMaps once and will have to be manually restarted to pick up new changes. | CLI parameters of the `argo-server` and `workflow-controller` can be specified as environment variables with the `ARGO_` prefix. For example: diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index 6451b4e9ce12..92928277a84f 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "os" "strconv" gosync "sync" "syscall" @@ -299,7 +300,10 @@ func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWo log.Fatal(err) } - go wfc.runConfigMapWatcher(ctx.Done()) + if os.Getenv("WATCH_CONTROLLER_SEMAPHORE_CONFIGMAPS") != "false" { + go wfc.runConfigMapWatcher(ctx.Done()) + } + go wfc.wfInformer.Run(ctx.Done()) go wfc.wftmplInformer.Informer().Run(ctx.Done()) go wfc.podInformer.Run(ctx.Done()) From a98027078fdd98113644b9d3e6833e79ecc57d2f Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Sun, 28 Jan 2024 12:49:49 +0800 Subject: [PATCH 25/38] fix: make sure taskresult completed when mark node succeed when it has outputs (#12537) Signed-off-by: shuangkun Signed-off-by: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Co-authored-by: Julie Vogelman Signed-off-by: Isitha Subasinghe --- api/jsonschema/schema.json | 2 +- api/openapi-spec/swagger.json | 2 +- docs/fields.md | 2 +- pkg/apis/workflow/v1alpha1/generated.proto | 2 +- .../workflow/v1alpha1/openapi_generated.go | 2 +- pkg/apis/workflow/v1alpha1/workflow_types.go | 10 +- ...oArgoprojWorkflowV1alpha1WorkflowStatus.md | 2 +- ...oproj_workflow_v1alpha1_workflow_status.py | 4 +- ...oArgoprojWorkflowV1alpha1WorkflowStatus.md | 2 +- workflow/controller/controller_test.go | 4 + workflow/controller/exit_handler_test.go | 4 + workflow/controller/hooks_test.go | 4 +- workflow/controller/operator.go | 28 ++++- .../controller/operator_concurrency_test.go | 10 +- workflow/controller/operator_test.go | 116 ++++++++++++++++++ 15 files changed, 174 insertions(+), 20 deletions(-) diff --git a/api/jsonschema/schema.json b/api/jsonschema/schema.json index 17dee0928faf..769022880084 100644 --- a/api/jsonschema/schema.json +++ b/api/jsonschema/schema.json @@ 
-7616,7 +7616,7 @@ "additionalProperties": { "type": "boolean" }, - "description": "TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection.", + "description": "TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.", "type": "object" } }, diff --git a/api/openapi-spec/swagger.json b/api/openapi-spec/swagger.json index b878339c3219..058779f7bc4b 100644 --- a/api/openapi-spec/swagger.json +++ b/api/openapi-spec/swagger.json @@ -11536,7 +11536,7 @@ "$ref": "#/definitions/io.argoproj.workflow.v1alpha1.SynchronizationStatus" }, "taskResultsCompletionStatus": { - "description": "TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection.", + "description": "TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.", "type": "object", "additionalProperties": { "type": "boolean" diff --git a/docs/fields.md b/docs/fields.md index 9051a3548098..f86743b23754 100644 --- a/docs/fields.md +++ b/docs/fields.md @@ -853,7 +853,7 @@ WorkflowStatus contains overall status information about a workflow |`storedTemplates`|[`Template`](#template)|StoredTemplates is a mapping between a template ref and the node's status.| |`storedWorkflowTemplateSpec`|[`WorkflowSpec`](#workflowspec)|StoredWorkflowSpec stores the WorkflowTemplate spec for future execution.| |`synchronization`|[`SynchronizationStatus`](#synchronizationstatus)|Synchronization stores the status of synchronization locks| -|`taskResultsCompletionStatus`|`Map< boolean , string >`|TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection.| +|`taskResultsCompletionStatus`|`Map< boolean , string >`|TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.| ## CronWorkflowSpec diff --git a/pkg/apis/workflow/v1alpha1/generated.proto b/pkg/apis/workflow/v1alpha1/generated.proto index ccd66ad0d35b..a6f52685499f 100644 --- a/pkg/apis/workflow/v1alpha1/generated.proto +++ b/pkg/apis/workflow/v1alpha1/generated.proto @@ -2118,7 +2118,7 @@ message WorkflowStatus { // ArtifactGCStatus maintains the status of Artifact Garbage Collection optional ArtGCStatus artifactGCStatus = 19; - // TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection. + // TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection. map taskResultsCompletionStatus = 20; } diff --git a/pkg/apis/workflow/v1alpha1/openapi_generated.go b/pkg/apis/workflow/v1alpha1/openapi_generated.go index 8e64e3ae03a6..2b7a4b5edc24 100644 --- a/pkg/apis/workflow/v1alpha1/openapi_generated.go +++ b/pkg/apis/workflow/v1alpha1/openapi_generated.go @@ -7842,7 +7842,7 @@ func schema_pkg_apis_workflow_v1alpha1_WorkflowStatus(ref common.ReferenceCallba }, "taskResultsCompletionStatus": { SchemaProps: spec.SchemaProps{ - Description: "TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). 
Used to prevent premature archiving and garbage collection.", + Description: "TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.", Type: []string{"object"}, AdditionalProperties: &spec.SchemaOrBool{ Allows: true, diff --git a/pkg/apis/workflow/v1alpha1/workflow_types.go b/pkg/apis/workflow/v1alpha1/workflow_types.go index 3d29464e67fe..6aab5cadcc4e 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types.go @@ -1940,7 +1940,7 @@ type WorkflowStatus struct { // ArtifactGCStatus maintains the status of Artifact Garbage Collection ArtifactGCStatus *ArtGCStatus `json:"artifactGCStatus,omitempty" protobuf:"bytes,19,opt,name=artifactGCStatus"` - // TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection. + // TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection. TaskResultsCompletionStatus map[string]bool `json:"taskResultsCompletionStatus,omitempty" protobuf:"bytes,20,opt,name=taskResultsCompletionStatus"` } @@ -1967,6 +1967,14 @@ func (ws *WorkflowStatus) TaskResultsInProgress() bool { return false } +func (ws *WorkflowStatus) IsTaskResultIncomplete(name string) bool { + value, found := ws.TaskResultsCompletionStatus[name] + if found { + return !value + } + return true +} + func (ws *WorkflowStatus) IsOffloadNodeStatus() bool { return ws.OffloadNodeStatusVersion != "" } diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md index c008ea6ef78c..8fa76f9bbbc7 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md @@ -26,7 +26,7 @@ Name | Type | Description | Notes **storedTemplates** | [**Map<String, IoArgoprojWorkflowV1alpha1Template>**](IoArgoprojWorkflowV1alpha1Template.md) | StoredTemplates is a mapping between a template ref and the node's status. | [optional] **storedWorkflowTemplateSpec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | [optional] **synchronization** | [**IoArgoprojWorkflowV1alpha1SynchronizationStatus**](IoArgoprojWorkflowV1alpha1SynchronizationStatus.md) | | [optional] -**taskResultsCompletionStatus** | **Map<String, Boolean>** | TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection. | [optional] +**taskResultsCompletionStatus** | **Map<String, Boolean>** | TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection. 
| [optional] diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py index e71435a478b6..8820976ed168 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_workflow_status.py @@ -210,7 +210,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 stored_templates ({str: (IoArgoprojWorkflowV1alpha1Template,)}): StoredTemplates is a mapping between a template ref and the node's status.. [optional] # noqa: E501 stored_workflow_template_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): [optional] # noqa: E501 synchronization (IoArgoprojWorkflowV1alpha1SynchronizationStatus): [optional] # noqa: E501 - task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection.. [optional] # noqa: E501 + task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) @@ -310,7 +310,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 stored_templates ({str: (IoArgoprojWorkflowV1alpha1Template,)}): StoredTemplates is a mapping between a template ref and the node's status.. [optional] # noqa: E501 stored_workflow_template_spec (IoArgoprojWorkflowV1alpha1WorkflowSpec): [optional] # noqa: E501 synchronization (IoArgoprojWorkflowV1alpha1SynchronizationStatus): [optional] # noqa: E501 - task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection.. [optional] # noqa: E501 + task_results_completion_status ({str: (bool,)}): TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). Used to prevent premature archiving and garbage collection.. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md index 06b3715f3745..c46412b8d3c8 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1WorkflowStatus.md @@ -23,7 +23,7 @@ Name | Type | Description | Notes **stored_templates** | [**{str: (IoArgoprojWorkflowV1alpha1Template,)}**](IoArgoprojWorkflowV1alpha1Template.md) | StoredTemplates is a mapping between a template ref and the node's status. | [optional] **stored_workflow_template_spec** | [**IoArgoprojWorkflowV1alpha1WorkflowSpec**](IoArgoprojWorkflowV1alpha1WorkflowSpec.md) | | [optional] **synchronization** | [**IoArgoprojWorkflowV1alpha1SynchronizationStatus**](IoArgoprojWorkflowV1alpha1SynchronizationStatus.md) | | [optional] -**task_results_completion_status** | **{str: (bool,)}** | TaskResultsCompletionStatus tracks task result completion status (mapped by pod name). Used to prevent premature archiving and garbage collection. | [optional] +**task_results_completion_status** | **{str: (bool,)}** | TaskResultsCompletionStatus tracks task result completion status (mapped by node ID). 
Used to prevent premature archiving and garbage collection. | [optional] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/workflow/controller/controller_test.go b/workflow/controller/controller_test.go index 15468eb444e5..6bee65ee23fc 100644 --- a/workflow/controller/controller_test.go +++ b/workflow/controller/controller_test.go @@ -510,6 +510,10 @@ func makePodsPhase(ctx context.Context, woc *wfOperationCtx, phase apiv1.PodPhas if err != nil { panic(err) } + if phase == apiv1.PodSucceeded { + nodeID := woc.nodeID(&pod) + woc.wf.Status.MarkTaskResultComplete(nodeID) + } } } } diff --git a/workflow/controller/exit_handler_test.go b/workflow/controller/exit_handler_test.go index 32bd3dbe8f6c..daed6956a6b2 100644 --- a/workflow/controller/exit_handler_test.go +++ b/workflow/controller/exit_handler_test.go @@ -209,6 +209,7 @@ func TestStepsOnExitTmplWithArt(t *testing.T) { }, } woc.wf.Status.Nodes[idx] = node + woc.wf.Status.MarkTaskResultComplete(node.ID) } } woc1 := newWorkflowOperationCtx(woc.wf, controller) @@ -283,6 +284,7 @@ func TestDAGOnExitTmplWithArt(t *testing.T) { }, } woc.wf.Status.Nodes[idx] = node + woc.wf.Status.MarkTaskResultComplete(node.ID) } } woc1 := newWorkflowOperationCtx(woc.wf, controller) @@ -383,6 +385,7 @@ func TestStepsTmplOnExit(t *testing.T) { }, } woc2.wf.Status.Nodes[idx] = node + woc.wf.Status.MarkTaskResultComplete(node.ID) } } @@ -487,6 +490,7 @@ func TestDAGOnExit(t *testing.T) { }, } woc2.wf.Status.Nodes[idx] = node + woc.wf.Status.MarkTaskResultComplete(node.ID) } } woc3 := newWorkflowOperationCtx(woc2.wf, controller) diff --git a/workflow/controller/hooks_test.go b/workflow/controller/hooks_test.go index 7a4ab353cb81..77b49230ea2b 100644 --- a/workflow/controller/hooks_test.go +++ b/workflow/controller/hooks_test.go @@ -997,7 +997,7 @@ spec: assert.Equal(t, wfv1.NodePending, node.Phase) makePodsPhase(ctx, woc, apiv1.PodFailed) woc = newWorkflowOperationCtx(woc.wf, controller) - err := woc.podReconciliation(ctx) + err, _ := woc.podReconciliation(ctx) assert.NoError(t, err) node = woc.wf.Status.Nodes.FindByDisplayName("hook-failures.hooks.failure") assert.NotNil(t, node) @@ -1140,6 +1140,7 @@ spec: pod, _ := podcs.Get(ctx, "hook-running", metav1.GetOptions{}) pod.Status.Phase = apiv1.PodSucceeded updatedPod, _ := podcs.Update(ctx, pod, metav1.UpdateOptions{}) + woc.wf.Status.MarkTaskResultComplete(woc.nodeID(pod)) _ = woc.controller.podInformer.GetStore().Update(updatedPod) woc = newWorkflowOperationCtx(woc.wf, controller) woc.operate(ctx) @@ -1231,6 +1232,7 @@ spec: pod.Status.Phase = apiv1.PodSucceeded updatedPod, _ := podcs.Update(ctx, &pod, metav1.UpdateOptions{}) _ = woc.controller.podInformer.GetStore().Update(updatedPod) + woc.wf.Status.MarkTaskResultComplete(woc.nodeID(&pod)) woc = newWorkflowOperationCtx(woc.wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.Progress("1/2"), woc.wf.Status.Progress) diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index 48b464af21e1..8de46065b06b 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -307,7 +307,7 @@ func (woc *wfOperationCtx) operate(ctx context.Context) { woc.wf.Status.EstimatedDuration = woc.estimateWorkflowDuration() } else { 
woc.workflowDeadline = woc.getWorkflowDeadline() - err = woc.podReconciliation(ctx) + err, podReconciliationCompleted := woc.podReconciliation(ctx) if err == nil { woc.failSuspendedAndPendingNodesAfterDeadlineOrShutdown() } @@ -318,6 +318,12 @@ func (woc *wfOperationCtx) operate(ctx context.Context) { // TODO: we need to re-add to the workqueue, but should happen in caller return } + + if !podReconciliationCompleted { + woc.log.WithField("workflow", woc.wf.ObjectMeta.Name).Info("pod reconciliation didn't complete, will retry") + woc.requeue() + return + } } if woc.ShouldSuspend() { @@ -1088,15 +1094,17 @@ func (woc *wfOperationCtx) processNodeRetries(node *wfv1.NodeStatus, retryStrate // pods and update the node state before continuing the evaluation of the workflow. // Records all pods which were observed completed, which will be labeled completed=true // after successful persist of the workflow. -func (woc *wfOperationCtx) podReconciliation(ctx context.Context) error { +// returns whether pod reconciliation successfully completed +func (woc *wfOperationCtx) podReconciliation(ctx context.Context) (error, bool) { podList, err := woc.getAllWorkflowPods() if err != nil { - return err + return err, false } seenPods := make(map[string]*apiv1.Pod) seenPodLock := &sync.Mutex{} wfNodesLock := &sync.RWMutex{} podRunningCondition := wfv1.Condition{Type: wfv1.ConditionTypePodRunning, Status: metav1.ConditionFalse} + taskResultIncomplete := false performAssessment := func(pod *apiv1.Pod) { if pod == nil { return @@ -1115,6 +1123,12 @@ func (woc *wfOperationCtx) podReconciliation(ctx context.Context) error { node, err := woc.wf.Status.Nodes.Get(nodeID) if err == nil { if newState := woc.assessNodeStatus(pod, node); newState != nil { + // Check whether its taskresult is in an incompleted state. + if newState.Succeeded() && woc.wf.Status.IsTaskResultIncomplete(node.ID) { + woc.log.WithFields(log.Fields{"nodeID": newState.ID}).Debug("Taskresult of the node not yet completed") + taskResultIncomplete = true + return + } woc.addOutputsToGlobalScope(newState.Outputs) if newState.MemoizationStatus != nil { if newState.Succeeded() { @@ -1158,6 +1172,12 @@ func (woc *wfOperationCtx) podReconciliation(ctx context.Context) error { wg.Wait() + // If true, it means there are some nodes which have outputs we wanted to be marked succeed, but the node's taskresults didn't completed. + // We should make sure the taskresults processing is complete as it will be possible to reference it in the next step. + if taskResultIncomplete { + return nil, false + } + woc.wf.Status.Conditions.UpsertCondition(podRunningCondition) // Now check for deleted pods. Iterate our nodes. 
If any one of our nodes does not show up in @@ -1197,7 +1217,7 @@ func (woc *wfOperationCtx) podReconciliation(ctx context.Context) error { woc.markNodePhase(node.Name, wfv1.NodeError, "pod deleted") } } - return nil + return nil, !taskResultIncomplete } func (woc *wfOperationCtx) nodeID(pod *apiv1.Pod) string { diff --git a/workflow/controller/operator_concurrency_test.go b/workflow/controller/operator_concurrency_test.go index bd9b6393455e..6e0d5c6486cf 100644 --- a/workflow/controller/operator_concurrency_test.go +++ b/workflow/controller/operator_concurrency_test.go @@ -196,7 +196,7 @@ func TestSemaphoreTmplLevel(t *testing.T) { woc_two.operate(ctx) // Check Node status - err = woc_two.podReconciliation(ctx) + err, _ = woc_two.podReconciliation(ctx) assert.NoError(t, err) for _, node := range woc_two.wf.Status.Nodes { assert.Equal(t, wfv1.NodePending, node.Phase) @@ -257,7 +257,7 @@ func TestSemaphoreScriptTmplLevel(t *testing.T) { woc_two.operate(ctx) // Check Node status - err = woc_two.podReconciliation(ctx) + err, _ = woc_two.podReconciliation(ctx) assert.NoError(t, err) for _, node := range woc_two.wf.Status.Nodes { assert.Equal(t, wfv1.NodePending, node.Phase) @@ -319,7 +319,7 @@ func TestSemaphoreScriptConfigMapInDifferentNamespace(t *testing.T) { woc_two.operate(ctx) // Check Node status - err = woc_two.podReconciliation(ctx) + err, _ = woc_two.podReconciliation(ctx) assert.NoError(t, err) for _, node := range woc_two.wf.Status.Nodes { assert.Equal(t, wfv1.NodePending, node.Phase) @@ -379,7 +379,7 @@ func TestSemaphoreResourceTmplLevel(t *testing.T) { woc_two.operate(ctx) // Check Node status - err = woc_two.podReconciliation(ctx) + err, _ = woc_two.podReconciliation(ctx) assert.NoError(t, err) for _, node := range woc_two.wf.Status.Nodes { assert.Equal(t, wfv1.NodePending, node.Phase) @@ -416,7 +416,7 @@ func TestSemaphoreWithOutConfigMap(t *testing.T) { wf, err := controller.wfclientset.ArgoprojV1alpha1().Workflows(wf.Namespace).Create(ctx, wf, metav1.CreateOptions{}) assert.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) - err = woc.podReconciliation(ctx) + err, _ = woc.podReconciliation(ctx) assert.NoError(t, err) for _, node := range woc.wf.Status.Nodes { assert.Equal(t, wfv1.NodePending, node.Phase) diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 85e91acdf638..3c0e5d07bb54 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -1769,6 +1769,8 @@ func TestWorkflowStepRetry(t *testing.T) { wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) assert.Nil(t, err) woc = newWorkflowOperationCtx(wf, controller) + nodeID := woc.nodeID(&pods.Items[0]) + woc.wf.Status.MarkTaskResultComplete(nodeID) woc.operate(ctx) // fail the second pod @@ -10162,3 +10164,117 @@ status: woc.operate(ctx) } + +var needReconcileWorklfow = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: steps-need-reconcile +spec: + entrypoint: hello-hello-hello + arguments: + parameters: + - name: message1 + value: hello world + - name: message2 + value: foobar + # This spec contains two templates: hello-hello-hello and whalesay + templates: + - name: hello-hello-hello + # Instead of just running a container + # This template has a sequence of steps + steps: + - - name: hello1 # hello1 is run before the following steps + continueOn: {} + template: whalesay + arguments: + parameters: + - name: message + value: "hello1" + - name: workflow_artifact_key + value: "{{ 
workflow.parameters.message2}}" + - - name: hello2a # double dash => run after previous step + template: whalesay + arguments: + parameters: + - name: message + value: "{{=steps['hello1'].outputs.parameters['workflow_artifact_key']}}" + + # This is the same template as from the previous example + - name: whalesay + inputs: + parameters: + - name: message + outputs: + parameters: + - name: workflow_artifact_key + value: '{{workflow.name}}' + script: + image: python:alpine3.6 + command: [python] + env: + - name: message + value: "{{inputs.parameters.message}}" + source: | + import random + i = random.randint(1, 100) + print(i)` + +// TestWorkflowNeedReconcile test whether a workflow need reconcile taskresults. +func TestWorkflowNeedReconcile(t *testing.T) { + cancel, controller := newController() + defer cancel() + ctx := context.Background() + wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") + wf := wfv1.MustUnmarshalWorkflow(needReconcileWorklfow) + wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) + assert.Nil(t, err) + wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + assert.Nil(t, err) + woc := newWorkflowOperationCtx(wf, controller) + woc.operate(ctx) + pods, err := listPods(woc) + assert.Nil(t, err) + assert.Equal(t, 1, len(pods.Items)) + + // complete the first pod + makePodsPhase(ctx, woc, apiv1.PodSucceeded) + wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + assert.Nil(t, err) + woc = newWorkflowOperationCtx(wf, controller) + err, podReconciliationCompleted := woc.podReconciliation(ctx) + assert.Nil(t, err) + assert.False(t, podReconciliationCompleted) + + for idx, node := range woc.wf.Status.Nodes { + if strings.Contains(node.Name, ".hello1") { + node.Outputs = &wfv1.Outputs{ + Parameters: []wfv1.Parameter{ + { + Name: "workflow_artifact_key", + Value: wfv1.AnyStringPtr("steps-need-reconcile"), + }, + }, + } + woc.wf.Status.Nodes[idx] = node + woc.wf.Status.MarkTaskResultComplete(node.ID) + } + } + err, podReconciliationCompleted = woc.podReconciliation(ctx) + assert.Nil(t, err) + assert.True(t, podReconciliationCompleted) + woc.operate(ctx) + + // complete the second pod + makePodsPhase(ctx, woc, apiv1.PodSucceeded) + wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) + assert.Nil(t, err) + woc = newWorkflowOperationCtx(wf, controller) + woc.operate(ctx) + pods, err = listPods(woc) + assert.Nil(t, err) + if assert.Equal(t, 2, len(pods.Items)) { + assert.Equal(t, "hello1", pods.Items[0].Spec.Containers[1].Env[0].Value) + assert.Equal(t, "steps-need-reconcile", pods.Items[1].Spec.Containers[1].Env[0].Value) + } +} From 88332d4c37f34a71b5adbd4e9d720ff4645864dd Mon Sep 17 00:00:00 2001 From: Isitha Subasinghe Date: Wed, 31 Jan 2024 02:05:48 +1100 Subject: [PATCH 26/38] fix: upgrade expr-lang. 
Fixes #12037 (#12573) Signed-off-by: isubasinghe Signed-off-by: Isitha Subasinghe Signed-off-by: Isitha Subasinghe --- api/jsonschema/schema.json | 4 +- api/openapi-spec/swagger.json | 4 +- docs/argo-server-sso.md | 2 +- docs/data-sourcing-and-transformation.md | 2 +- docs/events.md | 2 +- docs/executor_swagger.md | 2 +- docs/fields.md | 2 +- docs/retries.md | 2 +- docs/variables.md | 2 +- go.mod | 4 +- go.sum | 8 +-- pkg/apis/workflow/v1alpha1/event_types.go | 2 +- pkg/apis/workflow/v1alpha1/generated.proto | 4 +- .../workflow/v1alpha1/openapi_generated.go | 4 +- pkg/apis/workflow/v1alpha1/workflow_types.go | 2 +- pkg/plugins/executor/swagger.yml | 2 +- .../docs/IoArgoprojWorkflowV1alpha1Event.md | 2 +- .../IoArgoprojWorkflowV1alpha1ValueFrom.md | 2 +- .../io_argoproj_workflow_v1alpha1_event.py | 4 +- ...o_argoproj_workflow_v1alpha1_value_from.py | 4 +- .../docs/IoArgoprojWorkflowV1alpha1Event.md | 2 +- .../IoArgoprojWorkflowV1alpha1ValueFrom.md | 2 +- server/event/dispatch/operation.go | 15 +++- server/event/dispatch/operation_test.go | 6 +- test/e2e/argo_server_test.go | 2 +- test/e2e/expr_lang.go | 70 +++++++++++++++++++ util/expr/argoexpr/eval.go | 8 ++- util/expr/argoexpr/eval_test.go | 18 +++++ util/template/expression_template.go | 17 +++-- util/template/replace_test.go | 2 +- util/template/resolve_var.go | 8 ++- util/template/resolve_var_test.go | 4 +- workflow/controller/operator.go | 8 ++- workflow/controller/scope.go | 18 ++++- workflow/data/data.go | 9 ++- 35 files changed, 191 insertions(+), 58 deletions(-) create mode 100644 test/e2e/expr_lang.go diff --git a/api/jsonschema/schema.json b/api/jsonschema/schema.json index 769022880084..61302ca62125 100644 --- a/api/jsonschema/schema.json +++ b/api/jsonschema/schema.json @@ -4719,7 +4719,7 @@ "io.argoproj.workflow.v1alpha1.Event": { "properties": { "selector": { - "description": "Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"`", + "description": "Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"`", "type": "string" } }, @@ -6932,7 +6932,7 @@ "type": "string" }, "event": { - "description": "Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`", + "description": "Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`", "type": "string" }, "expression": { diff --git a/api/openapi-spec/swagger.json b/api/openapi-spec/swagger.json index 058779f7bc4b..78eafafe2486 100644 --- a/api/openapi-spec/swagger.json +++ b/api/openapi-spec/swagger.json @@ -8663,7 +8663,7 @@ ], "properties": { "selector": { - "description": "Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"`", + "description": "Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"`", "type": "string" } } @@ -10873,7 +10873,7 @@ "type": "string" }, "event": { - "description": "Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`", + "description": "Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. 
`payload.message`", "type": "string" }, "expression": { diff --git a/docs/argo-server-sso.md b/docs/argo-server-sso.md index 7dd96cdc678a..4ab4927748b8 100644 --- a/docs/argo-server-sso.md +++ b/docs/argo-server-sso.md @@ -83,7 +83,7 @@ metadata: # Must evaluate to a boolean. # If you want an account to be the default to use, this rule can be "true". # Details of the expression language are available in - # https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md. + # https://github.com/expr-lang/expr/blob/master/docs/language-definition.md. workflows.argoproj.io/rbac-rule: "'admin' in groups" # The precedence is used to determine which service account to use whe # Precedence is an integer. It may be negative. If omitted, it defaults to "0". diff --git a/docs/data-sourcing-and-transformation.md b/docs/data-sourcing-and-transformation.md index 8eacded5f6b6..0c5f78505835 100644 --- a/docs/data-sourcing-and-transformation.md +++ b/docs/data-sourcing-and-transformation.md @@ -53,6 +53,6 @@ A `data` template must always contain a `source`. Current available sources: A `data` template may contain any number of transformations (or zero). The transformations will be applied serially in order. Current available transformations: -* `expression`: an [`expr`](https://github.com/antonmedv/expr) expression. See language definition [here](https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md). When defining `expr` expressions Argo will pass the available data to the environment as a variable called `data` (see example above). +* `expression`: an [`expr`](https://github.com/expr-lang/expr) expression. See language definition [here](https://github.com/expr-lang/expr/blob/master/docs/Language-Definition.md). When defining `expr` expressions Argo will pass the available data to the environment as a variable called `data` (see example above). We understand that the `expression` transformation is limited. We intend to greatly expand the functionality of this template with our community's feedback. Please see the link at the top of this document to submit ideas or use cases for this feature. diff --git a/docs/events.md b/docs/events.md index 6fdb4d8f59c9..d7d6eb72d5cd 100644 --- a/docs/events.md +++ b/docs/events.md @@ -156,7 +156,7 @@ requirements](https://kubernetes.io/docs/concepts/overview/working-with-objects/ Because the endpoint accepts any JSON data, it is the user's responsibility to write a suitable expression to correctly filter the events they are interested in. Therefore, DO NOT assume the existence of any fields, and guard against them using a nil check. -[Learn more about expression syntax](https://github.com/antonmedv/expr). +[Learn more about expression syntax](https://github.com/expr-lang/expr). ### Expression Environment diff --git a/docs/executor_swagger.md b/docs/executor_swagger.md index 7e3d674dd8c8..fb365f905478 100644 --- a/docs/executor_swagger.md +++ b/docs/executor_swagger.md @@ -4837,7 +4837,7 @@ Cannot be updated. |------|------|---------|:--------:| ------- |-------------|---------| | configMapKeyRef | [ConfigMapKeySelector](#config-map-key-selector)| `ConfigMapKeySelector` | | | | | | default | [AnyString](#any-string)| `AnyString` | | | | | -| event | string| `string` | | | Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. 
`payload.message` | | +| event | string| `string` | | | Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` | | | expression | string| `string` | | | Expression, if defined, is evaluated to specify the value for the parameter | | | jqFilter | string| `string` | | | JQFilter expression against the resource object in resource templates | | | jsonPath | string| `string` | | | JSONPath of a resource to retrieve an output parameter value from in resource templates | | diff --git a/docs/fields.md b/docs/fields.md index f86743b23754..4616db922a3a 100644 --- a/docs/fields.md +++ b/docs/fields.md @@ -3533,7 +3533,7 @@ ValueFrom describes a location in which to obtain the value to a parameter |:----------:|:----------:|---------------| |`configMapKeyRef`|[`ConfigMapKeySelector`](#configmapkeyselector)|ConfigMapKeyRef is configmap selector for input parameter configuration| |`default`|`string`|Default specifies a value to be used if retrieving the value from the specified source fails| -|`event`|`string`|Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`| +|`event`|`string`|Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`| |`expression`|`string`|Expression, if defined, is evaluated to specify the value for the parameter| |`jqFilter`|`string`|JQFilter expression against the resource object in resource templates| |`jsonPath`|`string`|JSONPath of a resource to retrieve an output parameter value from in resource templates| diff --git a/docs/retries.md b/docs/retries.md index a4222b10ce3d..00106a521d49 100644 --- a/docs/retries.md +++ b/docs/retries.md @@ -86,7 +86,7 @@ spec: > v3.2 and after You can also use `expression` to control retries. The `expression` field -accepts an [expr](https://github.com/antonmedv/expr) expression and has +accepts an [expr](https://github.com/expr-lang/expr) expression and has access to the following variables: - `lastRetry.exitCode`: The exit code of the last retry, or "-1" if not available diff --git a/docs/variables.md b/docs/variables.md index d8eec76280ac..669fd622341f 100644 --- a/docs/variables.md +++ b/docs/variables.md @@ -56,7 +56,7 @@ The tag is substituted with the result of evaluating the tag as an expression. Note that any hyphenated parameter names or step names will cause a parsing error. You can reference them by indexing into the parameter or step map, e.g. `inputs.parameters['my-param']` or `steps['my-step'].outputs.result`. -[Learn about the expression syntax](https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md). +[Learn about the expression syntax](https://github.com/expr-lang/expr/blob/master/docs/language-definition.md). 
#### Examples diff --git a/go.mod b/go.mod index 389c9ef11eb4..8c444c7657a9 100644 --- a/go.mod +++ b/go.mod @@ -12,14 +12,14 @@ require ( github.com/alibabacloud-go/tea v1.2.1 github.com/aliyun/aliyun-oss-go-sdk v3.0.1+incompatible github.com/aliyun/credentials-go v1.3.2 - github.com/antonmedv/expr v1.15.5 github.com/argoproj/argo-events v1.7.3 github.com/argoproj/pkg v0.13.7-0.20230901113346-235a5432ec98 github.com/blushft/go-diagrams v0.0.0-20201006005127-c78c821223d9 github.com/colinmarc/hdfs/v2 v2.4.0 github.com/coreos/go-oidc/v3 v3.7.0 github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 - github.com/evanphx/json-patch v5.7.0+incompatible + github.com/evanphx/json-patch v5.8.0+incompatible + github.com/expr-lang/expr v1.16.0 github.com/gavv/httpexpect/v2 v2.10.0 github.com/go-git/go-git/v5 v5.11.0 github.com/go-jose/go-jose/v3 v3.0.1 diff --git a/go.sum b/go.sum index 4936196387cc..d366a08aead2 100644 --- a/go.sum +++ b/go.sum @@ -149,8 +149,6 @@ github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHG github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/antonmedv/expr v1.15.5 h1:y0Iz3cEwmpRz5/r3w4qQR0MfIqJGdGM1zbhD/v0G5Vg= -github.com/antonmedv/expr v1.15.5/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE= github.com/argoproj/argo-events v1.7.3 h1:XiGnKCzRRQCI7sFCKw3RoeFUOR6IupfAJI9uUK7pnG8= github.com/argoproj/argo-events v1.7.3/go.mod h1:YxDOXrveW52SDAeeTI93Wagkr4jt5DK0dA0juIdWDRw= github.com/argoproj/pkg v0.13.7-0.20230901113346-235a5432ec98 h1:Y1wJVJePMad3LwH+OIX4cl9ND3251XUNxjgpxFRWmZs= @@ -306,12 +304,14 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch v5.7.0+incompatible h1:vgGkfT/9f8zE6tvSCe74nfpAVDQ2tG6yudJd8LBksgI= -github.com/evanphx/json-patch v5.7.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v5.8.0+incompatible h1:1Av9pn2FyxPdvrWNQszj1g6D6YthSmvCfcN6SYclTJg= +github.com/evanphx/json-patch v5.8.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evilmonkeyinc/jsonpath v0.8.1 h1:W8K4t8u7aipkQE0hcTICGAdAN0Xph349LtjgSoofvVo= github.com/evilmonkeyinc/jsonpath v0.8.1/go.mod h1:EQhs0ZsoD4uD56ZJbO30gMTfHLQ6DEa0/5rT5Ymy42s= github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM= github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= +github.com/expr-lang/expr v1.16.0 h1:BQabx+PbjsL2PEQwkJ4GIn3CcuUh8flduHhJ0lHjWwE= +github.com/expr-lang/expr v1.16.0/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ= github.com/fasthttp/websocket v1.4.3-rc.6 h1:omHqsl8j+KXpmzRjF8bmzOSYJ8GnS0E3efi1wYT+niY= github.com/fasthttp/websocket v1.4.3-rc.6/go.mod h1:43W9OM2T8FeXpCWMsBd9Cb7nE2CACNqNvCqQCoty/Lc= github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= diff --git 
a/pkg/apis/workflow/v1alpha1/event_types.go b/pkg/apis/workflow/v1alpha1/event_types.go index bdfab46d3aa7..d6f49b0d8648 100644 --- a/pkg/apis/workflow/v1alpha1/event_types.go +++ b/pkg/apis/workflow/v1alpha1/event_types.go @@ -32,7 +32,7 @@ type WorkflowEventBindingSpec struct { } type Event struct { - // Selector (https://github.com/antonmedv/expr) that we must must match the event. E.g. `payload.message == "test"` + // Selector (https://github.com/expr-lang/expr) that we must must match the event. E.g. `payload.message == "test"` Selector string `json:"selector" protobuf:"bytes,1,opt,name=selector"` } diff --git a/pkg/apis/workflow/v1alpha1/generated.proto b/pkg/apis/workflow/v1alpha1/generated.proto index a6f52685499f..af41463f1daa 100644 --- a/pkg/apis/workflow/v1alpha1/generated.proto +++ b/pkg/apis/workflow/v1alpha1/generated.proto @@ -596,7 +596,7 @@ message DataSource { } message Event { - // Selector (https://github.com/antonmedv/expr) that we must must match the event. E.g. `payload.message == "test"` + // Selector (https://github.com/expr-lang/expr) that we must must match the event. E.g. `payload.message == "test"` optional string selector = 1; } @@ -1747,7 +1747,7 @@ message ValueFrom { // JQFilter expression against the resource object in resource templates optional string jqFilter = 3; - // Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` + // Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` optional string event = 7; // Parameter reference to a step or dag task in which to retrieve an output parameter value from diff --git a/pkg/apis/workflow/v1alpha1/openapi_generated.go b/pkg/apis/workflow/v1alpha1/openapi_generated.go index 2b7a4b5edc24..187a3737836d 100644 --- a/pkg/apis/workflow/v1alpha1/openapi_generated.go +++ b/pkg/apis/workflow/v1alpha1/openapi_generated.go @@ -2543,7 +2543,7 @@ func schema_pkg_apis_workflow_v1alpha1_Event(ref common.ReferenceCallback) commo Properties: map[string]spec.Schema{ "selector": { SchemaProps: spec.SchemaProps{ - Description: "Selector (https://github.com/antonmedv/expr) that we must must match the event. E.g. `payload.message == \"test\"`", + Description: "Selector (https://github.com/expr-lang/expr) that we must must match the event. E.g. `payload.message == \"test\"`", Default: "", Type: []string{"string"}, Format: "", @@ -6738,7 +6738,7 @@ func schema_pkg_apis_workflow_v1alpha1_ValueFrom(ref common.ReferenceCallback) c }, "event": { SchemaProps: spec.SchemaProps{ - Description: "Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`", + Description: "Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. 
`payload.message`", Type: []string{"string"}, Format: "", }, diff --git a/pkg/apis/workflow/v1alpha1/workflow_types.go b/pkg/apis/workflow/v1alpha1/workflow_types.go index 6aab5cadcc4e..68a850018344 100644 --- a/pkg/apis/workflow/v1alpha1/workflow_types.go +++ b/pkg/apis/workflow/v1alpha1/workflow_types.go @@ -886,7 +886,7 @@ type ValueFrom struct { // JQFilter expression against the resource object in resource templates JQFilter string `json:"jqFilter,omitempty" protobuf:"bytes,3,opt,name=jqFilter"` - // Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` + // Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` Event string `json:"event,omitempty" protobuf:"bytes,7,opt,name=event"` // Parameter reference to a step or dag task in which to retrieve an output parameter value from diff --git a/pkg/plugins/executor/swagger.yml b/pkg/plugins/executor/swagger.yml index 68b965509042..4cf099c2e39c 100644 --- a/pkg/plugins/executor/swagger.yml +++ b/pkg/plugins/executor/swagger.yml @@ -4415,7 +4415,7 @@ definitions: default: $ref: '#/definitions/AnyString' event: - description: Selector (https://github.com/antonmedv/expr) that is evaluated + description: Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` type: string expression: diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Event.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Event.md index a9564452ca03..3963165b5b4e 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Event.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1Event.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**selector** | **String** | Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` | +**selector** | **String** | Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` | diff --git a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md index 9cc6cd901a91..046d505babd1 100644 --- a/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md +++ b/sdks/java/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md @@ -10,7 +10,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **configMapKeyRef** | [**io.kubernetes.client.openapi.models.V1ConfigMapKeySelector**](io.kubernetes.client.openapi.models.V1ConfigMapKeySelector.md) | | [optional] **_default** | **String** | Default specifies a value to be used if retrieving the value from the specified source fails | [optional] -**event** | **String** | Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` | [optional] +**event** | **String** | Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. 
`payload.message` | [optional] **expression** | **String** | Expression, if defined, is evaluated to specify the value for the parameter | [optional] **jqFilter** | **String** | JQFilter expression against the resource object in resource templates | [optional] **jsonPath** | **String** | JSONPath of a resource to retrieve an output parameter value from in resource templates | [optional] diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py index 553bbe1af801..b5f3e0182b31 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_event.py @@ -104,7 +104,7 @@ def _from_openapi_data(cls, selector, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1Event - a model defined in OpenAPI Args: - selector (str): Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` + selector (str): Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -189,7 +189,7 @@ def __init__(self, selector, *args, **kwargs): # noqa: E501 """IoArgoprojWorkflowV1alpha1Event - a model defined in OpenAPI Args: - selector (str): Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` + selector (str): Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py index 099aae1664ef..312c53dbed38 100644 --- a/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py +++ b/sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_value_from.py @@ -158,7 +158,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 default (str): Default specifies a value to be used if retrieving the value from the specified source fails. [optional] # noqa: E501 - event (str): Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 + event (str): Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 expression (str): Expression, if defined, is evaluated to specify the value for the parameter. [optional] # noqa: E501 jq_filter (str): JQFilter expression against the resource object in resource templates. [optional] # noqa: E501 json_path (str): JSONPath of a resource to retrieve an output parameter value from in resource templates. 
[optional] # noqa: E501 @@ -248,7 +248,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) config_map_key_ref (ConfigMapKeySelector): [optional] # noqa: E501 default (str): Default specifies a value to be used if retrieving the value from the specified source fails. [optional] # noqa: E501 - event (str): Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 + event (str): Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message`. [optional] # noqa: E501 expression (str): Expression, if defined, is evaluated to specify the value for the parameter. [optional] # noqa: E501 jq_filter (str): JQFilter expression against the resource object in resource templates. [optional] # noqa: E501 json_path (str): JSONPath of a resource to retrieve an output parameter value from in resource templates. [optional] # noqa: E501 diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md index 2822aa9998fc..8fe08221b8d7 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1Event.md @@ -4,7 +4,7 @@ ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**selector** | **str** | Selector (https://github.com/antonmedv/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` | +**selector** | **str** | Selector (https://github.com/expr-lang/expr) that we must must match the io.argoproj.workflow.v1alpha1. E.g. `payload.message == \"test\"` | **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md index 93d2c9f2ec1d..d603c7bddb90 100644 --- a/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md +++ b/sdks/python/client/docs/IoArgoprojWorkflowV1alpha1ValueFrom.md @@ -7,7 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **config_map_key_ref** | [**ConfigMapKeySelector**](ConfigMapKeySelector.md) | | [optional] **default** | **str** | Default specifies a value to be used if retrieving the value from the specified source fails | [optional] -**event** | **str** | Selector (https://github.com/antonmedv/expr) that is evaluated against the event to get the value of the parameter. E.g. `payload.message` | [optional] +**event** | **str** | Selector (https://github.com/expr-lang/expr) that is evaluated against the event to get the value of the parameter. E.g. 
`payload.message` | [optional] **expression** | **str** | Expression, if defined, is evaluated to specify the value for the parameter | [optional] **jq_filter** | **str** | JQFilter expression against the resource object in resource templates | [optional] **json_path** | **str** | JSONPath of a resource to retrieve an output parameter value from in resource templates | [optional] diff --git a/server/event/dispatch/operation.go b/server/event/dispatch/operation.go index de9f13cf83c6..c24a95a4e79f 100644 --- a/server/event/dispatch/operation.go +++ b/server/event/dispatch/operation.go @@ -6,7 +6,7 @@ import ( "fmt" "strings" - "github.com/antonmedv/expr" + "github.com/expr-lang/expr" log "github.com/sirupsen/logrus" "google.golang.org/grpc/metadata" corev1 "k8s.io/api/core/v1" @@ -121,7 +121,11 @@ func (o *Operation) dispatch(ctx context.Context, wfeb wfv1.WorkflowEventBinding if p.ValueFrom == nil { return nil, fmt.Errorf("malformed workflow template parameter \"%s\": valueFrom is nil", p.Name) } - result, err := expr.Eval(p.ValueFrom.Event, o.env) + program, err := expr.Compile(p.ValueFrom.Event, expr.Env(o.env)) + if err != nil { + return nil, fmt.Errorf("failed to compile workflow template parameter %s expression: %w", p.Name, err) + } + result, err := expr.Run(program, o.env) if err != nil { return nil, fmt.Errorf("failed to evaluate workflow template parameter \"%s\" expression: %w", p.Name, err) } @@ -183,7 +187,12 @@ func (o *Operation) populateWorkflowMetadata(wf *wfv1.Workflow, metadata *metav1 } func (o *Operation) evaluateStringExpression(statement string, errorInfo string) (string, error) { - result, err := expr.Eval(statement, exprenv.GetFuncMap(o.env)) + env := exprenv.GetFuncMap(o.env) + program, err := expr.Compile(statement, expr.Env(env)) + if err != nil { + return "", fmt.Errorf("failed to evaluate workflow %s expression: %w", errorInfo, err) + } + result, err := expr.Run(program, env) if err != nil { return "", fmt.Errorf("failed to evaluate workflow %s expression: %w", errorInfo, err) } diff --git a/server/event/dispatch/operation_test.go b/server/event/dispatch/operation_test.go index abdbe6cab5b9..36d49fa725f1 100644 --- a/server/event/dispatch/operation_test.go +++ b/server/event/dispatch/operation_test.go @@ -187,12 +187,12 @@ func TestNewOperation(t *testing.T) { sort.Strings(paramValues) assert.Equal(t, expectedParamValues, paramValues) } - assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow template expression: unable to evaluate expression '': unexpected token EOF (1:1)", <-recorder.Events) + assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow template expression: unexpected token EOF (1:1)", <-recorder.Events) assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to get workflow template: workflowtemplates.argoproj.io \"not-found\" not found", <-recorder.Events) assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to validate workflow template instanceid: 'my-wft-3' is not managed by the current Argo Server", <-recorder.Events) - assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow template expression: unable to evaluate expression 'garbage!!!!!!': unexpected token Operator(\"!\") (1:8)\n | garbage!!!!!!\n | .......^", <-recorder.Events) + assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow 
template expression: unexpected token Operator(\"!\") (1:8)\n | garbage!!!!!!\n | .......^", <-recorder.Events) assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow template expression: unable to cast expression result 'garbage' to bool", <-recorder.Events) - assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to evaluate workflow template parameter \"my-param\" expression: unexpected token Operator(\"!\") (1:8)\n | rubbish!!!\n | .......^", <-recorder.Events) + assert.Equal(t, "Warning WorkflowEventBindingError failed to dispatch event: failed to compile workflow template parameter my-param expression: unexpected token Operator(\"!\") (1:8)\n | rubbish!!!\n | .......^", <-recorder.Events) } func Test_populateWorkflowMetadata(t *testing.T) { diff --git a/test/e2e/argo_server_test.go b/test/e2e/argo_server_test.go index 9e862ae1bc4f..e064a795f2cd 100644 --- a/test/e2e/argo_server_test.go +++ b/test/e2e/argo_server_test.go @@ -312,7 +312,7 @@ metadata: func(t *testing.T, e []corev1.Event) { assert.Equal(t, "argo", e[0].InvolvedObject.Namespace) assert.Equal(t, "WorkflowEventBindingError", e[0].Reason) - assert.Equal(t, "failed to dispatch event: failed to evaluate workflow template expression: unable to evaluate expression '': unexpected token EOF (1:1)", e[0].Message) + assert.Equal(t, "failed to dispatch event: failed to evaluate workflow template expression: unexpected token EOF (1:1)", e[0].Message) }, ) } diff --git a/test/e2e/expr_lang.go b/test/e2e/expr_lang.go new file mode 100644 index 000000000000..3c0511d53c8d --- /dev/null +++ b/test/e2e/expr_lang.go @@ -0,0 +1,70 @@ +//go:build functional +// +build functional + +package e2e + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + apiv1 "k8s.io/api/core/v1" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/test/e2e/fixtures" +) + +type ExprSuite struct { + fixtures.E2ESuite +} + +func (s *ExprSuite) TestRegression12037() { + s.Given(). + Workflow(`apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: broken- +spec: + entrypoint: main + templates: + - name: main + dag: + tasks: + - name: split + template: foo + - name: map + template: foo + depends: split + + - name: foo + container: + image: alpine + command: + - sh + - -c + - | + echo "foo" +`).When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeSucceeded). + Then(). + ExpectWorkflow(func(t *testing.T, metadata *v1.ObjectMeta, status *v1alpha1.WorkflowStatus) { + assert.Equal(t, status.Phase, v1alpha1.WorkflowSucceeded) + }). + ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { + return strings.Contains(status.Name, ".split") + }, func(t *testing.T, status *v1alpha1.NodeStatus, pod *apiv1.Pod) { + assert.Equal(t, v1alpha1.NodeSucceeded, status.Phase) + }). 
+ ExpectWorkflowNode(func(status v1alpha1.NodeStatus) bool { + return strings.Contains(status.Name, ".map") + }, func(t *testing.T, status *v1alpha1.NodeStatus, pod *apiv1.Pod) { + assert.Equal(t, v1alpha1.NodeSucceeded, status.Phase) + }) +} + +func TestExprLangSSuite(t *testing.T) { + suite.Run(t, new(ExprSuite)) +} diff --git a/util/expr/argoexpr/eval.go b/util/expr/argoexpr/eval.go index 62394e91eb57..41ce14003189 100644 --- a/util/expr/argoexpr/eval.go +++ b/util/expr/argoexpr/eval.go @@ -3,11 +3,15 @@ package argoexpr import ( "fmt" - "github.com/antonmedv/expr" + "github.com/expr-lang/expr" ) func EvalBool(input string, env interface{}) (bool, error) { - result, err := expr.Eval(input, env) + program, err := expr.Compile(input, expr.Env(env)) + if err != nil { + return false, err + } + result, err := expr.Run(program, env) if err != nil { return false, fmt.Errorf("unable to evaluate expression '%s': %s", input, err) } diff --git a/util/expr/argoexpr/eval_test.go b/util/expr/argoexpr/eval_test.go index 1f3718480357..28a59f5b7608 100644 --- a/util/expr/argoexpr/eval_test.go +++ b/util/expr/argoexpr/eval_test.go @@ -40,6 +40,24 @@ func TestEvalBool(t *testing.T) { want: true, wantErr: false, }, + { + name: "test override builtins", + args: args{ + input: "split == 1", + env: map[string]interface{}{"split": 1}, + }, + want: true, + wantErr: false, + }, + { + name: "test override builtins", + args: args{ + input: "join == 1", + env: map[string]interface{}{"join": 1}, + }, + want: true, + wantErr: false, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/util/template/expression_template.go b/util/template/expression_template.go index f80b6dcb97c9..94ab2b8b5621 100644 --- a/util/template/expression_template.go +++ b/util/template/expression_template.go @@ -7,10 +7,10 @@ import ( "os" "strings" - "github.com/antonmedv/expr" - "github.com/antonmedv/expr/file" - "github.com/antonmedv/expr/parser/lexer" "github.com/doublerebel/bellows" + "github.com/expr-lang/expr" + "github.com/expr-lang/expr/file" + "github.com/expr-lang/expr/parser/lexer" log "github.com/sirupsen/logrus" ) @@ -41,7 +41,7 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ // This is to make sure expressions which contains `workflow.status` and `work.failures` don't get resolved to nil // when `workflow.status` and `workflow.failures` don't exist in the env. - // See https://github.com/argoproj/argo-workflows/issues/10393, https://github.com/antonmedv/expr/issues/330 + // See https://github.com/argoproj/argo-workflows/issues/10393, https://github.com/expr-lang/expr/issues/330 // This issue doesn't happen to other template parameters since `workflow.status` and `workflow.failures` only exist in the env // when the exit handlers complete. 
if ((hasWorkflowStatus(unmarshalledExpression) && !hasVarInEnv(env, "workflow.status")) || @@ -50,7 +50,14 @@ func expressionReplace(w io.Writer, expression string, env map[string]interface{ return w.Write([]byte(fmt.Sprintf("{{%s%s}}", kindExpression, expression))) } - result, err := expr.Eval(unmarshalledExpression, env) + program, err := expr.Compile(unmarshalledExpression, expr.Env(env)) + // This allowUnresolved check is not great + // it allows for errors that are obviously + // not failed reference checks to also pass + if err != nil && !allowUnresolved { + return 0, fmt.Errorf("failed to evaluate expression: %w", err) + } + result, err := expr.Run(program, env) if (err != nil || result == nil) && allowUnresolved { // result is also un-resolved, and any error can be unresolved log.WithError(err).Debug("Result and error are unresolved") diff --git a/util/template/replace_test.go b/util/template/replace_test.go index 0ab0c8ae6a60..9f9f02697325 100644 --- a/util/template/replace_test.go +++ b/util/template/replace_test.go @@ -78,7 +78,7 @@ func Test_Replace(t *testing.T) { }) t.Run("Disallowed", func(t *testing.T) { _, err := Replace(toJsonString("{{=foo}}"), nil, false) - assert.EqualError(t, err, "failed to evaluate expression \"foo\"") + assert.EqualError(t, err, "failed to evaluate expression: unknown name foo (1:1)\n | foo\n | ^") }) t.Run("DisallowedWorkflowStatus", func(t *testing.T) { _, err := Replace(toJsonString(`{{=workflow.status == "Succeeded" ? "SUCCESSFUL" : "FAILED"}}`), nil, false) diff --git a/util/template/resolve_var.go b/util/template/resolve_var.go index 954faa8dc20c..6d4b3b210864 100644 --- a/util/template/resolve_var.go +++ b/util/template/resolve_var.go @@ -3,7 +3,7 @@ package template import ( "strings" - "github.com/antonmedv/expr" + "github.com/expr-lang/expr" "github.com/argoproj/argo-workflows/v3/errors" ) @@ -13,7 +13,11 @@ func ResolveVar(s string, m map[string]interface{}) (interface{}, error) { kind, expression := parseTag(tag) switch kind { case kindExpression: - result, err := expr.Eval(expression, m) + program, err := expr.Compile(expression, expr.Env(m)) + if err != nil { + return nil, errors.Errorf(errors.CodeBadRequest, "Unable to compile: %q", expression) + } + result, err := expr.Run(program, m) if err != nil { return nil, errors.Errorf(errors.CodeBadRequest, "Invalid expression: %q", expression) } diff --git a/util/template/resolve_var_test.go b/util/template/resolve_var_test.go index 6ca9219b11ba..f1c82e6d9dc2 100644 --- a/util/template/resolve_var_test.go +++ b/util/template/resolve_var_test.go @@ -31,11 +31,11 @@ func Test_ResolveVar(t *testing.T) { }) t.Run("Unresolved", func(t *testing.T) { _, err := ResolveVar("{{=foo}}", nil) - assert.EqualError(t, err, "Unable to resolve: \"=foo\"") + assert.EqualError(t, err, "Unable to compile: \"foo\"") }) t.Run("Error", func(t *testing.T) { _, err := ResolveVar("{{=!}}", nil) - assert.EqualError(t, err, "Invalid expression: \"!\"") + assert.EqualError(t, err, "Unable to compile: \"!\"") }) }) } diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index 8de46065b06b..a273233783ac 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -17,11 +17,11 @@ import ( "github.com/argoproj/argo-workflows/v3/util/secrets" - "github.com/antonmedv/expr" "github.com/argoproj/pkg/humanize" argokubeerr "github.com/argoproj/pkg/kube/errors" "github.com/argoproj/pkg/strftime" jsonpatch "github.com/evanphx/json-patch" + "github.com/expr-lang/expr" log 
"github.com/sirupsen/logrus" apiv1 "k8s.io/api/core/v1" policyv1 "k8s.io/api/policy/v1" @@ -562,7 +562,11 @@ func (woc *wfOperationCtx) updateWorkflowMetadata() error { env := env.GetFuncMap(template.EnvMap(woc.globalParams)) for n, f := range md.LabelsFrom { - r, err := expr.Eval(f.Expression, env) + program, err := expr.Compile(f.Expression, expr.Env(env)) + if err != nil { + return fmt.Errorf("Failed to compile function for expression %q: %w", f.Expression, err) + } + r, err := expr.Run(program, env) if err != nil { return fmt.Errorf("failed to evaluate label %q expression %q: %w", n, f.Expression, err) } diff --git a/workflow/controller/scope.go b/workflow/controller/scope.go index 42ce5356c7cd..30eb60a54230 100644 --- a/workflow/controller/scope.go +++ b/workflow/controller/scope.go @@ -4,7 +4,7 @@ import ( "encoding/json" "fmt" - "github.com/antonmedv/expr" + "github.com/expr-lang/expr" "github.com/argoproj/argo-workflows/v3/errors" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" @@ -77,7 +77,11 @@ func (s *wfScope) resolveParameter(p *wfv1.ValueFrom) (interface{}, error) { } if p.Expression != "" { env := env.GetFuncMap(s.scope) - return expr.Eval(p.Expression, env) + program, err := expr.Compile(p.Expression, expr.Env(env)) + if err != nil { + return nil, err + } + return expr.Run(program, env) } else { return s.resolveVar(p.Parameter) } @@ -93,7 +97,15 @@ func (s *wfScope) resolveArtifact(art *wfv1.Artifact) (*wfv1.Artifact, error) { if art.FromExpression != "" { env := env.GetFuncMap(s.scope) - val, err = expr.Eval(art.FromExpression, env) + program, err := expr.Compile(art.FromExpression, expr.Env(env)) + if err != nil { + return nil, err + } + val, err = expr.Run(program, env) + if err != nil { + return nil, err + } + } else { val, err = s.resolveVar(art.From) } diff --git a/workflow/data/data.go b/workflow/data/data.go index 9ffd7add77df..942fcc62f941 100644 --- a/workflow/data/data.go +++ b/workflow/data/data.go @@ -3,7 +3,7 @@ package data import ( "fmt" - "github.com/antonmedv/expr" + "github.com/expr-lang/expr" wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" ) @@ -56,5 +56,10 @@ func processTransformation(data interface{}, transformation *wfv1.Transformation } func processExpression(expression string, data interface{}) (interface{}, error) { - return expr.Eval(expression, map[string]interface{}{"data": data}) + env := map[string]interface{}{"data": data} + program, err := expr.Compile(expression, expr.Env(env)) + if err != nil { + return nil, err + } + return expr.Run(program, env) } From c425aa0ee572a39ead178add6357595cd4c20a07 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Sun, 11 Feb 2024 04:49:58 -0500 Subject: [PATCH 27/38] fix(docs): remove `workflow-controller-configmap.yaml` self reference (#12654) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe From 288eddcfeb34d53b14c72f698007c48e9afe7906 Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Sun, 14 Jan 2024 01:41:56 +0800 Subject: [PATCH 28/38] =?UTF-8?q?fix:=20wrong=20values=20are=20assigned=20?= =?UTF-8?q?to=20input=20parameters=20of=20workflowtemplat=E2=80=A6=20(#124?= =?UTF-8?q?12)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: shuangkun Co-authored-by: sherwinkoo29 Signed-off-by: Isitha Subasinghe --- workflow/util/merge.go | 2 + workflow/util/merge_test.go | 146 ++++++++++++++++++++++++++++++++++++ 
2 files changed, 148 insertions(+) diff --git a/workflow/util/merge.go b/workflow/util/merge.go index 3b21d5cf02c3..c6a0226968b7 100644 --- a/workflow/util/merge.go +++ b/workflow/util/merge.go @@ -36,6 +36,8 @@ func MergeTo(patch, target *wfv1.Workflow) error { if err != nil { return err } + + target.Spec = wfv1.WorkflowSpec{} err = json.Unmarshal(mergedWfByte, target) if err != nil { return err diff --git a/workflow/util/merge_test.go b/workflow/util/merge_test.go index 738047a21b35..7899f6bd93b1 100644 --- a/workflow/util/merge_test.go +++ b/workflow/util/merge_test.go @@ -224,6 +224,141 @@ spec: ` +var wfArguments = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: test-workflow +spec: + workflowTemplateRef: + name: test-workflow-template + arguments: + parameters: + - name: PARAM1 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM1 + - name: PARAM2 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM2 + - name: PARAM4 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM4 + - name: PARAM5 + value: "Workflow value 5"` + +var wfArgumentsTemplate = ` +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: test-workflow-template +spec: + entrypoint: main + ttlStrategy: + secondsAfterCompletion: 600 + secondsAfterSuccess: 600 + secondsAfterFailure: 600 + arguments: + parameters: + - name: PARAM1 + - name: PARAM2 + - name: PARAM3 + value: WorkflowTemplate value 3 + - name: PARAM4 + - name: PARAM5 + templates: + - name: main + inputs: + parameters: + - name: PARAM1 + value: "{{workflow.parameters.PARAM1}}" + - name: PARAM2 + value: "{{workflow.parameters.PARAM2}}" + - name: PARAM3 + value: "{{workflow.parameters.PARAM3}}" + - name: PARAM4 + value: "{{workflow.parameters.PARAM4}}" + - name: PARAM5 + value: "{{workflow.parameters.PARAM5}}" + script: + image: busybox:latest + command: + - sh + source: | + echo -e " + PARAM1={{inputs.parameters.PARAM1}} + PARAM2={{inputs.parameters.PARAM2}} + PARAM3={{inputs.parameters.PARAM3}} + PARAM4={{inputs.parameters.PARAM4}} + PARAM5={{inputs.parameters.PARAM5}} + " +` + +var wfArgumentsResult = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: test-workflow +spec: + entrypoint: main + ttlStrategy: + secondsAfterCompletion: 600 + secondsAfterSuccess: 600 + secondsAfterFailure: 600 + arguments: + parameters: + - name: PARAM1 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM1 + - name: PARAM2 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM2 + - name: PARAM3 + value: WorkflowTemplate value 3 + - name: PARAM4 + valueFrom: + configMapKeyRef: + name: test-config-map + key: PARAM4 + - name: PARAM5 + value: "Workflow value 5" + templates: + - name: main + inputs: + parameters: + - name: PARAM1 + value: "{{workflow.parameters.PARAM1}}" + - name: PARAM2 + value: "{{workflow.parameters.PARAM2}}" + - name: PARAM3 + value: "{{workflow.parameters.PARAM3}}" + - name: PARAM4 + value: "{{workflow.parameters.PARAM4}}" + - name: PARAM5 + value: "{{workflow.parameters.PARAM5}}" + script: + image: busybox:latest + command: + - sh + source: | + echo -e " + PARAM1={{inputs.parameters.PARAM1}} + PARAM2={{inputs.parameters.PARAM2}} + PARAM3={{inputs.parameters.PARAM3}} + PARAM4={{inputs.parameters.PARAM4}} + PARAM5={{inputs.parameters.PARAM5}} + " +` + func TestJoinWfSpecs(t *testing.T) { assert := assert.New(t) wfDefault := wfv1.MustUnmarshalWorkflow(wfDefault) @@ -239,6 +374,17 @@ func TestJoinWfSpecs(t *testing.T) { 
assert.Equal("whalesay", targetWf.Spec.Entrypoint) } +func TestJoinWfSpecArguments(t *testing.T) { + assert := assert.New(t) + wf := wfv1.MustUnmarshalWorkflow(wfArguments) + wft := wfv1.MustUnmarshalWorkflowTemplate(wfArgumentsTemplate) + result := wfv1.MustUnmarshalWorkflow(wfArgumentsResult) + + targetWf, err := JoinWorkflowSpec(&wf.Spec, wft.GetWorkflowSpec(), nil) + assert.NoError(err) + assert.Equal(result.Spec.Arguments, targetWf.Spec.Arguments) +} + func TestJoinWorkflowMetaData(t *testing.T) { assert := assert.New(t) t.Run("WfDefaultMetaData", func(t *testing.T) { From eb71bad60321fcdb5638471cf21ac67fb8a98a2a Mon Sep 17 00:00:00 2001 From: Garett MacGowan Date: Fri, 12 Jan 2024 12:24:48 -0500 Subject: [PATCH 29/38] fix: Add missing 'archived' prop for ArtifactPanel component. Fixes #12331 (#12397) Signed-off-by: Garett MacGowan Signed-off-by: Isitha Subasinghe --- test/e2e/argo_server_test.go | 24 ++++++++++ .../testdata/artifact-passing-workflow.yaml | 45 +++++++++++++++++++ .../workflow-details/artifact-panel.tsx | 2 +- .../workflow-details/workflow-details.tsx | 8 +++- 4 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 test/e2e/testdata/artifact-passing-workflow.yaml diff --git a/test/e2e/argo_server_test.go b/test/e2e/argo_server_test.go index e064a795f2cd..015a93e7e025 100644 --- a/test/e2e/argo_server_test.go +++ b/test/e2e/argo_server_test.go @@ -1037,6 +1037,30 @@ spec: }) } +func (s *ArgoServerSuite) TestArtifactServerArchivedWorkflow() { + var uid types.UID + var nodeID string + s.Given(). + Workflow(`@testdata/artifact-passing-workflow.yaml`). + When(). + SubmitWorkflow(). + WaitForWorkflow(fixtures.ToBeArchived). + Then(). + ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { + uid = metadata.UID + nodeID = status.Nodes.FindByDisplayName("generate-artifact").ID + }) + + // In this case, the artifact name is a file + s.Run("GetArtifactByNodeID", func() { + s.e().GET("/artifact-files/argo/archived-workflows/{uid}/{nodeID}/outputs/hello", uid, nodeID). + Expect(). + Status(200). + Body(). + Contains(":) Hello Argo!") + }) +} + func (s *ArgoServerSuite) TestArtifactServerArchivedStoppedWorkflow() { var uid types.UID var nodeID string diff --git a/test/e2e/testdata/artifact-passing-workflow.yaml b/test/e2e/testdata/artifact-passing-workflow.yaml new file mode 100644 index 000000000000..b2c25881fb57 --- /dev/null +++ b/test/e2e/testdata/artifact-passing-workflow.yaml @@ -0,0 +1,45 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: artifact-passing- +spec: + entrypoint: artifact-example + templates: + - name: artifact-example + steps: + - - name: generate-artifact + template: create-message + - - name: consume-artifact + template: print-message + arguments: + artifacts: + # bind message to the hello artifact + # generated by the generate-artifact step + - name: message + from: "{{steps.generate-artifact.outputs.artifacts.hello}}" + + - name: create-message + container: + image: alpine:latest + command: [sh, -c] + args: ["echo ':) Hello Argo!' 
| tee /tmp/hello_world.txt"] + outputs: + artifacts: + # generate hello artifact from /tmp/hello_world.txt + # artifacts can be directories as well as files + - name: hello + path: /tmp/hello_world.txt + archive: + none: {} + + - name: print-message + inputs: + artifacts: + # unpack the message input artifact + # and put it at /tmp/message + - name: message + path: /tmp/message + container: + image: alpine:latest + command: [sh, -c] + args: ["cat /tmp/message"] \ No newline at end of file diff --git a/ui/src/app/workflows/components/workflow-details/artifact-panel.tsx b/ui/src/app/workflows/components/workflow-details/artifact-panel.tsx index 1b73a6c11b3c..496d83867b6f 100644 --- a/ui/src/app/workflows/components/workflow-details/artifact-panel.tsx +++ b/ui/src/app/workflows/components/workflow-details/artifact-panel.tsx @@ -21,7 +21,7 @@ export const ArtifactPanel = ({ }: { workflow: Workflow; artifact: Artifact & {nodeId: string; artifactNameDiscriminator: string}; - archived?: boolean; + archived: boolean; artifactRepository: ArtifactRepository; }) => { const input = artifact.artifactNameDiscriminator === 'input'; diff --git a/ui/src/app/workflows/components/workflow-details/workflow-details.tsx b/ui/src/app/workflows/components/workflow-details/workflow-details.tsx index a4b05885d414..7e094c5dd6c1 100644 --- a/ui/src/app/workflows/components/workflow-details/workflow-details.tsx +++ b/ui/src/app/workflows/components/workflow-details/workflow-details.tsx @@ -503,6 +503,8 @@ export function WorkflowDetails({history, location, match}: RouteComponentProps< const podName = ensurePodName(workflow, selectedNode, nodeId); + const archived = isArchivedWorkflow(workflow); + return ( setSidePanel(`logs:${x}:${container}`)} onShowEvents={() => setSidePanel(`events:${nodeId}`)} onShowYaml={() => setSidePanel(`yaml:${nodeId}`)} - archived={isArchivedWorkflow(workflow)} + archived={archived} onResume={() => renderResumePopup()} /> )} - {selectedArtifact && } + {selectedArtifact && ( + + )} ))} From a4674b9a193451ad8379bd0c55604232c181abea Mon Sep 17 00:00:00 2001 From: shuangkun tian <72060326+shuangkun@users.noreply.github.com> Date: Sat, 13 Jan 2024 00:35:46 +0800 Subject: [PATCH 30/38] fix: merge env bug in workflow-controller-configmap and container. 
Fixes #12424 (#12426) Signed-off-by: shuangkun Co-authored-by: sherwinkoo29 Signed-off-by: Isitha Subasinghe --- workflow/controller/workflowpod.go | 9 ++- workflow/controller/workflowpod_test.go | 85 +++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/workflow/controller/workflowpod.go b/workflow/controller/workflowpod.go index c53d32b2303f..fd0af602e55a 100644 --- a/workflow/controller/workflowpod.go +++ b/workflow/controller/workflowpod.go @@ -8,6 +8,8 @@ import ( "strconv" "time" + "k8s.io/apimachinery/pkg/util/strategicpatch" + log "github.com/sirupsen/logrus" apiv1 "k8s.io/api/core/v1" apierr "k8s.io/apimachinery/pkg/api/errors" @@ -117,10 +119,13 @@ func (woc *wfOperationCtx) createWorkflowPod(ctx context.Context, nodeName strin if err != nil { return nil, err } - if err := json.Unmarshal(a, &c); err != nil { + + mergedContainerByte, err := strategicpatch.StrategicMergePatch(a, b, apiv1.Container{}) + if err != nil { return nil, err } - if err = json.Unmarshal(b, &c); err != nil { + c = apiv1.Container{} + if err := json.Unmarshal(mergedContainerByte, &c); err != nil { return nil, err } } diff --git a/workflow/controller/workflowpod_test.go b/workflow/controller/workflowpod_test.go index 04e75598f0a7..bab6dad04a6c 100644 --- a/workflow/controller/workflowpod_test.go +++ b/workflow/controller/workflowpod_test.go @@ -1904,3 +1904,88 @@ func TestProgressEnvVars(t *testing.T) { }) }) } + +var helloWorldWfWithEnvReferSecret = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: hello-world +spec: + entrypoint: whalesay + templates: + - name: whalesay + metadata: + annotations: + annotationKey1: "annotationValue1" + annotationKey2: "annotationValue2" + labels: + labelKey1: "labelValue1" + labelKey2: "labelValue2" + container: + image: docker/whalesay:latest + command: [cowsay] + args: ["hello world"] + env: + - name: ENV3 + valueFrom: + secretKeyRef: + name: mysecret + key: sec +` + +func TestMergeEnvVars(t *testing.T) { + setup := func(t *testing.T, options ...interface{}) (context.CancelFunc, *apiv1.Pod) { + cancel, controller := newController(options...) 
+ + wf := wfv1.MustUnmarshalWorkflow(helloWorldWfWithEnvReferSecret) + ctx := context.Background() + woc := newWorkflowOperationCtx(wf, controller) + err := woc.setExecWorkflow(ctx) + require.NoError(t, err) + mainCtrSpec := &apiv1.Container{ + Name: common.MainContainerName, + SecurityContext: &apiv1.SecurityContext{}, + Env: []apiv1.EnvVar{ + { + Name: "ENV1", + Value: "env1", + }, + { + Name: "ENV2", + Value: "env2", + }, + }, + } + woc.controller.Config.MainContainer = mainCtrSpec + mainCtr := woc.execWf.Spec.Templates[0].Container + + pod, err := woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{}) + require.NoError(t, err) + assert.NotNil(t, pod) + return cancel, pod + } + + t.Run("test merge envs", func(t *testing.T) { + cancel, pod := setup(t) + defer cancel() + assert.Contains(t, pod.Spec.Containers[1].Env, apiv1.EnvVar{ + Name: "ENV1", + Value: "env1", + }) + assert.Contains(t, pod.Spec.Containers[1].Env, apiv1.EnvVar{ + Name: "ENV2", + Value: "env2", + }) + assert.Contains(t, pod.Spec.Containers[1].Env, apiv1.EnvVar{ + Name: "ENV3", + ValueFrom: &apiv1.EnvVarSource{ + SecretKeyRef: &apiv1.SecretKeySelector{ + LocalObjectReference: apiv1.LocalObjectReference{ + Name: "mysecret", + }, + Key: "sec", + }, + }, + }) + }) +} From ae915fe9ffae19fc721a790b7611a2428a23c845 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Fri, 5 Jan 2024 13:05:13 -0500 Subject: [PATCH 31/38] fix(docs): handle `fields` examples with `md_in_html` (#12465) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe From 9a4c787e71e57edfe8a554a2f8f922cbe530430c Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:44:53 -0500 Subject: [PATCH 32/38] fix(docs): exclude `docs/requirements.txt` from docs build (#12466) Signed-off-by: Anton Gilgur Signed-off-by: Isitha Subasinghe From 72deab92a5dec7b8df87109fb54398509ce24639 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Thu, 4 Jan 2024 16:03:55 -0500 Subject: [PATCH 33/38] fix(docs): render Mermaid diagrams in docs (#12464) Signed-off-by: Isitha Subasinghe From dd8b4705bdc3e3207e70eba70af7f72fb812cd3d Mon Sep 17 00:00:00 2001 From: Jason Meridth Date: Tue, 2 Jan 2024 16:53:30 -0600 Subject: [PATCH 34/38] fix: documentation links (#12446) Signed-off-by: jmeridth Signed-off-by: Isitha Subasinghe --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 74fde90e3a67..e64088e9092e 100644 --- a/README.md +++ b/README.md @@ -70,7 +70,7 @@ Check out our [Java, Golang and Python clients](docs/client-libraries.md). 
## Quickstart -* [Get started here](docs/quick-start.md) +* [Get started here](https://argo-workflows.readthedocs.io/en/release-3.5/quick-start/) * [Walk-through examples](https://argo-workflows.readthedocs.io/en/release-3.5/walk-through/) ## Documentation From c2905bda5c9962fa64474a39a6e0c9b0a842e8c2 Mon Sep 17 00:00:00 2001 From: Tal Yitzhak Date: Thu, 8 Feb 2024 02:22:48 +0200 Subject: [PATCH 35/38] chore(deps): fixed medium CVE in github.com/docker/docker v24.0.0+incompatible (#12635) Signed-off-by: Tal Yitzhak Signed-off-by: Isitha Subasinghe --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8c444c7657a9..2967a98a5c38 100644 --- a/go.mod +++ b/go.mod @@ -162,7 +162,7 @@ require ( github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/daviddengcn/go-colortext v0.0.0-20160507010035-511bcaf42ccd // indirect github.com/dimchansky/utfbom v1.1.1 // indirect - github.com/docker/cli v24.0.0+incompatible // indirect + github.com/docker/cli v24.0.7+incompatible // indirect github.com/docker/distribution v2.8.2+incompatible // indirect github.com/docker/docker v24.0.0+incompatible // indirect github.com/docker/docker-credential-helpers v0.7.0 // indirect diff --git a/go.sum b/go.sum index d366a08aead2..5adf67231cf5 100644 --- a/go.sum +++ b/go.sum @@ -266,8 +266,8 @@ github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= -github.com/docker/cli v24.0.0+incompatible h1:0+1VshNwBQzQAx9lOl+OYCTCEAD8fKs/qeXMx3O0wqM= -github.com/docker/cli v24.0.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg= +github.com/docker/cli v24.0.7+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= From 6ee52fc96e700190de96a15993b933a26f0389c9 Mon Sep 17 00:00:00 2001 From: Eduardo Rodrigues Date: Sun, 25 Feb 2024 15:05:33 +0000 Subject: [PATCH 36/38] fix: make WF global parameters available in retries (#12698) Signed-off-by: eduardodbr (cherry picked from commit 9bec11438cc14758f363e36be444986b9fd7782b) --- workflow/controller/operator.go | 2 +- workflow/controller/operator_test.go | 57 ++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/workflow/controller/operator.go b/workflow/controller/operator.go index a273233783ac..8b0a6e16b810 100644 --- a/workflow/controller/operator.go +++ b/workflow/controller/operator.go @@ -2124,7 +2124,7 @@ func (woc *wfOperationCtx) executeTemplate(ctx context.Context, nodeName string, // Inject the retryAttempt number localParams[common.LocalVarRetries] = strconv.Itoa(retryNum) - processedTmpl, err = common.SubstituteParams(processedTmpl, map[string]string{}, localParams) + processedTmpl, err = common.SubstituteParams(processedTmpl, woc.globalParams, localParams) if errorsutil.IsTransientErr(err) { return node, err } diff --git 
a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 3c0e5d07bb54..31267e6ea67c 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -1322,6 +1322,63 @@ func TestRetriesVariableInPodSpecPatch(t *testing.T) { assert.ElementsMatch(t, actual, expected) } +var retriesVariableWithGlobalVariablesInPodSpecPatchTemplate = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: whalesay +spec: + entrypoint: whalesay + arguments: + parameters: + - name: memreqnum + value: 100 + templates: + - name: whalesay + retryStrategy: + limit: 10 + podSpecPatch: | + containers: + - name: main + resources: + limits: + memory: "{{= (sprig.int(retries)+1)* sprig.int(workflow.parameters.memreqnum)}}Mi" + container: + image: docker/whalesay:latest + command: [sh, -c] + args: ["cowsay hello"] +` + +func TestRetriesVariableWithGlobalVariableInPodSpecPatch(t *testing.T) { + wf := wfv1.MustUnmarshalWorkflow(retriesVariableWithGlobalVariablesInPodSpecPatchTemplate) + cancel, controller := newController(wf) + defer cancel() + ctx := context.Background() + iterations := 5 + var woc *wfOperationCtx + for i := 1; i <= iterations; i++ { + woc = newWorkflowOperationCtx(wf, controller) + if i != 1 { + makePodsPhase(ctx, woc, apiv1.PodFailed) + } + woc.operate(ctx) + wf = woc.wf + } + + pods, err := listPods(woc) + assert.NoError(t, err) + assert.Len(t, pods.Items, iterations) + expected := []string{} + actual := []string{} + for i := 0; i < iterations; i++ { + actual = append(actual, pods.Items[i].Spec.Containers[1].Resources.Limits.Memory().String()) + expected = append(expected, fmt.Sprintf("%dMi", (i+1)*100)) + } + // expecting memory limit to increase after each retry: "100Mi", "200Mi", "300Mi", "400Mi", "500Mi" + // ordering not preserved + assert.ElementsMatch(t, actual, expected) +} + var stepsRetriesVariableTemplate = ` apiVersion: argoproj.io/v1alpha1 kind: Workflow From 6af917eb322bb84a2733723433a9eb87b7f1e85d Mon Sep 17 00:00:00 2001 From: Yulin Li Date: Sun, 21 Jan 2024 12:24:27 +0800 Subject: [PATCH 37/38] chore(deps): bump github.com/cloudflare/circl to 1.3.7 to fix GHSA-9763-4f94-gfch (#12556) Signed-off-by: Yulin Li Co-authored-by: Yulin Li Co-authored-by: Anton Gilgur Signed-off-by: Anton Gilgur --- go.mod | 4 ++-- go.sum | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/go.mod b/go.mod index 2967a98a5c38..7ab35d0a6cb9 100644 --- a/go.mod +++ b/go.mod @@ -55,7 +55,7 @@ require ( github.com/upper/db/v4 v4.7.0 github.com/valyala/fasttemplate v1.2.2 github.com/xeipuuv/gojsonschema v1.2.0 - golang.org/x/crypto v0.16.0 + golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20230905200255-921286631fa9 golang.org/x/oauth2 v0.13.0 golang.org/x/sync v0.5.0 @@ -79,7 +79,7 @@ require ( require ( dario.cat/mergo v1.0.0 // indirect github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68 // indirect - github.com/cloudflare/circl v1.3.3 // indirect + github.com/cloudflare/circl v1.3.7 // indirect github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/evilmonkeyinc/jsonpath v0.8.1 // indirect diff --git a/go.sum b/go.sum index 5adf67231cf5..77d0a75daadd 100644 --- a/go.sum +++ b/go.sum @@ -222,8 +222,9 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test 
v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= +github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -1105,8 +1106,8 @@ golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= -golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= -golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20181106170214-d68db9428509/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= From c80b2e91ebd7e7f604e88442f45ec630380effa0 Mon Sep 17 00:00:00 2001 From: Anton Gilgur <4970083+agilgur5@users.noreply.github.com> Date: Fri, 16 Feb 2024 23:18:44 -0500 Subject: [PATCH 38/38] docs: consistently use "> v{version} and after" (#12581) Signed-off-by: Anton Gilgur --- .spelling | 1 + docs/argo-server-sso.md | 2 +- docs/artifact-visualization.md | 2 +- docs/cluster-workflow-templates.md | 12 ++++++------ docs/executor_plugins.md | 2 +- docs/high-availability.md | 6 +++--- docs/resource-template.md | 2 +- docs/scaling.md | 2 +- docs/suspend-template.md | 2 +- docs/variables.md | 4 ++-- docs/workflow-events.md | 5 +++-- docs/workflow-templates.md | 8 ++++---- 12 files changed, 25 insertions(+), 23 deletions(-) diff --git a/.spelling b/.spelling index ae9c1ac6b8bc..eb1e2bc90fca 100644 --- a/.spelling +++ b/.spelling @@ -215,6 +215,7 @@ v2 v2.0 v2.1 v2.10 +v2.10.2 v2.11 v2.12 v2.35.0 diff --git a/docs/argo-server-sso.md b/docs/argo-server-sso.md index 4ab4927748b8..46394dd08064 100644 --- a/docs/argo-server-sso.md +++ b/docs/argo-server-sso.md @@ -197,7 +197,7 @@ workflows.argoproj.io/rbac-rule: "'argo_admins' in groups" ## Filtering groups -> v3.5 and above +> v3.5 and after You can configure `filterGroupsRegex` to filter the groups returned by the OIDC provider. Some use-cases for this include: diff --git a/docs/artifact-visualization.md b/docs/artifact-visualization.md index 962be56d1473..ba68fdf1baff 100644 --- a/docs/artifact-visualization.md +++ b/docs/artifact-visualization.md @@ -1,6 +1,6 @@ # Artifact Visualization -> since v3.4 +> v3.4 and after Artifacts can be viewed in the UI. 
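The `filterGroupsRegex` option touched in the `argo-server-sso.md` hunk above is easier to follow with a concrete snippet. Below is a minimal sketch of an `sso` configuration block, assuming `filterGroupsRegex` accepts a list of regular expressions; the issuer URL, redirect URL, and Secret names are placeholders, not values from this patch series:

```yaml
sso:
  issuer: https://accounts.example.com          # placeholder issuer
  clientId:
    name: argo-server-sso                       # placeholder Secret name
    key: client-id
  clientSecret:
    name: argo-server-sso
    key: client-secret
  redirectUrl: https://argo.example.com/oauth2/callback
  # Only groups matching one of these expressions are kept from the
  # OIDC `groups` claim (assumed shape: a list of regex strings).
  filterGroupsRegex:
    - ".*argo-wf.*"
    - ".*argo-workflow.*"
```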
diff --git a/docs/cluster-workflow-templates.md b/docs/cluster-workflow-templates.md index 3a935534ebac..4d133bf502b1 100644 --- a/docs/cluster-workflow-templates.md +++ b/docs/cluster-workflow-templates.md @@ -56,10 +56,10 @@ spec: value: "hello world" ``` -> 2.9 and after - ### Create `Workflow` from `ClusterWorkflowTemplate` Spec +> v2.9 and after + You can create `Workflow` from `ClusterWorkflowTemplate` spec using `workflowTemplateRef` with `clusterScope: true`. If you pass the arguments to created `Workflow`, it will be merged with cluster workflow template arguments Here is an example for `ClusterWorkflowTemplate` with `entrypoint` and `arguments` @@ -129,15 +129,15 @@ You can create some example templates as follows: argo cluster-template create https://raw.githubusercontent.com/argoproj/argo-workflows/main/examples/cluster-workflow-template/clustertemplates.yaml ``` -The submit a workflow using one of those templates: +Then submit a `Workflow` using one of those templates: ```bash argo submit https://raw.githubusercontent.com/argoproj/argo-workflows/main/examples/cluster-workflow-template/cluster-wftmpl-dag.yaml ``` -> 2.7 and after -> -The submit a `ClusterWorkflowTemplate` as a `Workflow`: +> v2.7 and after + +Then submit a `ClusterWorkflowTemplate` as a `Workflow`: ```bash argo submit --from clusterworkflowtemplate/workflow-template-submittable diff --git a/docs/executor_plugins.md b/docs/executor_plugins.md index 69acb5a729e4..679237f142ae 100644 --- a/docs/executor_plugins.md +++ b/docs/executor_plugins.md @@ -1,6 +1,6 @@ # Executor Plugins -> Since v3.3 +> v3.3 and after ## Configuration diff --git a/docs/high-availability.md b/docs/high-availability.md index fb2a245f2cf4..c1a78de76ee7 100644 --- a/docs/high-availability.md +++ b/docs/high-availability.md @@ -4,7 +4,7 @@ Before v3.0, only one controller could run at once. (If it crashed, Kubernetes would start another pod.) -> v3.0 +> v3.0 and after For many users, a short loss of workflow service may be acceptable - the new controller will just continue running workflows if it restarts. However, with high service guarantees, new pods may take too long to start running workflows. @@ -18,9 +18,9 @@ Budget to prevent this and Pod Priority to recover faster from an involuntary po ## Argo Server -> v2.6 +> v2.6 and after Run a minimum of two replicas, typically three, should be run, otherwise it may be possible that API and webhook requests are dropped. !!! Tip - Consider using [multi AZ-deployment using pod anti-affinity](https://www.verygoodsecurity.com/blog/posts/kubernetes-multi-az-deployments-using-pod-anti-affinity). + Consider [spreading Pods across multiple availability zones](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/). diff --git a/docs/resource-template.md b/docs/resource-template.md index e3b09c7f872b..464f0e04b03d 100644 --- a/docs/resource-template.md +++ b/docs/resource-template.md @@ -1,5 +1,5 @@ # Resource Template -> v2.0 +> v2.0 and after See [Kubernetes Resources](walk-through/kubernetes-resources.md). diff --git a/docs/scaling.md b/docs/scaling.md index 8f9a85aa0032..0b3d4130c089 100644 --- a/docs/scaling.md +++ b/docs/scaling.md @@ -6,7 +6,7 @@ For running large workflows, you'll typically need to scale the controller to ma You cannot horizontally scale the controller. -> v3.0 +> v3.0 and after As of v3.0, the controller supports having a hot-standby for [High Availability](high-availability.md#workflow-controller). 
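The `workflowTemplateRef` with `clusterScope: true` pattern described in the `cluster-workflow-templates.md` hunks above is short enough to sketch inline. The names below (`cluster-workflow-template-submittable`, the `message` parameter) are illustrative only and assume a matching `ClusterWorkflowTemplate` already exists in the cluster:

```yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: hello-from-cluster-template-
spec:
  # Arguments supplied here are merged with the template's own arguments.
  arguments:
    parameters:
      - name: message
        value: "hello world"
  # clusterScope: true resolves the reference against ClusterWorkflowTemplates
  # instead of namespaced WorkflowTemplates.
  workflowTemplateRef:
    name: cluster-workflow-template-submittable
    clusterScope: true
```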
diff --git a/docs/suspend-template.md b/docs/suspend-template.md index a749be6d02cd..7737275b29a2 100644 --- a/docs/suspend-template.md +++ b/docs/suspend-template.md @@ -1,5 +1,5 @@ # Suspend Template -> v2.1 +> v2.1 and after See [Suspending](walk-through/suspending.md). diff --git a/docs/variables.md b/docs/variables.md index 669fd622341f..501dc9fd5559 100644 --- a/docs/variables.md +++ b/docs/variables.md @@ -49,7 +49,7 @@ args: [ "{{ inputs.parameters.message }}" ] ### Expression -> Since v3.1 +> v3.1 and after The tag is substituted with the result of evaluating the tag as an expression. @@ -170,7 +170,7 @@ sprig.trim(inputs.parameters['my-string-param']) ### HTTP Templates -> Since v3.3 +> v3.3 and after Only available for `successCondition` diff --git a/docs/workflow-events.md b/docs/workflow-events.md index 0e906b8487b0..ff50178fdba2 100644 --- a/docs/workflow-events.md +++ b/docs/workflow-events.md @@ -1,8 +1,9 @@ # Workflow Events -> v2.7.2 +> v2.7.2 and after -⚠️ Do not use Kubernetes events for automation. Events maybe lost or rolled-up. +!!! Warning "Kubernetes events" + Do not use Kubernetes events for automation as they can be lost or rolled-up. We emit Kubernetes events on certain events. diff --git a/docs/workflow-templates.md b/docs/workflow-templates.md index 1eaf9392a059..30fe124eea22 100644 --- a/docs/workflow-templates.md +++ b/docs/workflow-templates.md @@ -110,7 +110,7 @@ spec: ### Adding labels/annotations to Workflows with `workflowMetadata` -> 2.10.2 and after +> v2.10.2 and after To automatically add labels and/or annotations to Workflows created from `WorkflowTemplates`, use `workflowMetadata`. @@ -282,10 +282,10 @@ to pass in "live" arguments and reference other templates (those other templates This behavior has been problematic and dangerous. It causes confusion and has design inconsistencies. -> 2.9 and after - ### Create `Workflow` from `WorkflowTemplate` Spec +> v2.9 and after + You can create `Workflow` from `WorkflowTemplate` spec using `workflowTemplateRef`. If you pass the arguments to created `Workflow`, it will be merged with workflow template arguments. Here is an example for referring `WorkflowTemplate` as Workflow with passing `entrypoint` and `Workflow Arguments` to `WorkflowTemplate` @@ -333,7 +333,7 @@ Then submit a workflow using one of those templates: argo submit https://raw.githubusercontent.com/argoproj/argo-workflows/main/examples/workflow-template/hello-world.yaml ``` -> 2.7 and after +> v2.7 and after Then submit a `WorkflowTemplate` as a `Workflow`: