diff --git a/pkgs/core/schemas/0050_tables_definitions.sql b/pkgs/core/schemas/0050_tables_definitions.sql index 74c3d8b57..5b69de9b9 100644 --- a/pkgs/core/schemas/0050_tables_definitions.sql +++ b/pkgs/core/schemas/0050_tables_definitions.sql @@ -24,7 +24,8 @@ create table pgflow.steps ( opt_base_delay int, opt_timeout int, opt_start_delay int, - condition_pattern jsonb, -- JSON pattern for @> containment check + condition_pattern jsonb, -- JSON pattern for @> containment check (if) + condition_not_pattern jsonb, -- JSON pattern for NOT @> containment check (ifNot) when_unmet text not null default 'skip', -- What to do when condition not met (skip is natural default) when_failed text not null default 'fail', -- What to do when handler fails after retries created_at timestamptz not null default now(), diff --git a/pkgs/core/schemas/0100_function_add_step.sql b/pkgs/core/schemas/0100_function_add_step.sql index 0eecdcd7c..06a9dabd9 100644 --- a/pkgs/core/schemas/0100_function_add_step.sql +++ b/pkgs/core/schemas/0100_function_add_step.sql @@ -8,6 +8,7 @@ create or replace function pgflow.add_step( start_delay int default null, step_type text default 'single', condition_pattern jsonb default null, + condition_not_pattern jsonb default null, when_unmet text default 'skip', when_failed text default 'fail' ) @@ -40,7 +41,7 @@ BEGIN INSERT INTO pgflow.steps ( flow_slug, step_slug, step_type, step_index, deps_count, opt_max_attempts, opt_base_delay, opt_timeout, opt_start_delay, - condition_pattern, when_unmet, when_failed + condition_pattern, condition_not_pattern, when_unmet, when_failed ) VALUES ( add_step.flow_slug, @@ -53,6 +54,7 @@ BEGIN add_step.timeout, add_step.start_delay, add_step.condition_pattern, + add_step.condition_not_pattern, add_step.when_unmet, add_step.when_failed ) diff --git a/pkgs/core/schemas/0100_function_cascade_resolve_conditions.sql b/pkgs/core/schemas/0100_function_cascade_resolve_conditions.sql index 04a21893f..36ce12d6f 100644 --- a/pkgs/core/schemas/0100_function_cascade_resolve_conditions.sql +++ b/pkgs/core/schemas/0100_function_cascade_resolve_conditions.sql @@ -46,11 +46,15 @@ BEGIN -- PHASE 1a: CHECK FOR FAIL CONDITIONS -- ========================================== -- Find first step (by topological order) with unmet condition and 'fail' mode. 
+ -- Condition is unmet when: + -- (condition_pattern is set AND input does NOT contain it) OR + -- (condition_not_pattern is set AND input DOES contain it) WITH steps_with_conditions AS ( SELECT step_state.flow_slug, step_state.step_slug, step.condition_pattern, + step.condition_not_pattern, step.when_unmet, step.deps_count, step.step_index @@ -61,7 +65,7 @@ BEGIN WHERE step_state.run_id = cascade_resolve_conditions.run_id AND step_state.status = 'created' AND step_state.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) ), step_deps_output AS ( SELECT @@ -79,14 +83,17 @@ BEGIN condition_evaluations AS ( SELECT swc.*, - CASE - WHEN swc.deps_count = 0 THEN v_run_input @> swc.condition_pattern - ELSE COALESCE(sdo.deps_output, '{}'::jsonb) @> swc.condition_pattern - END AS condition_met + -- condition_met = (if IS NULL OR input @> if) AND (ifNot IS NULL OR NOT(input @> ifNot)) + (swc.condition_pattern IS NULL OR + CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_pattern) + AND + (swc.condition_not_pattern IS NULL OR + NOT (CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_not_pattern)) + AS condition_met FROM steps_with_conditions swc LEFT JOIN step_deps_output sdo ON sdo.step_slug = swc.step_slug ) - SELECT flow_slug, step_slug, condition_pattern + SELECT flow_slug, step_slug, condition_pattern, condition_not_pattern INTO v_first_fail FROM condition_evaluations WHERE NOT condition_met AND when_unmet = 'fail' @@ -94,11 +101,13 @@ BEGIN LIMIT 1; -- Handle fail mode: fail step and run, return false - IF v_first_fail IS NOT NULL THEN + -- Note: Cannot use "v_first_fail IS NOT NULL" because records with NULL fields + -- evaluate to NULL in IS NOT NULL checks. Use FOUND instead. + IF FOUND THEN UPDATE pgflow.step_states SET status = 'failed', failed_at = now(), - error_message = 'Condition not met: ' || v_first_fail.condition_pattern::text + error_message = 'Condition not met' WHERE pgflow.step_states.run_id = cascade_resolve_conditions.run_id AND pgflow.step_states.step_slug = v_first_fail.step_slug; @@ -114,12 +123,13 @@ BEGIN -- PHASE 1b: HANDLE SKIP CONDITIONS (with propagation) -- ========================================== -- Skip steps with unmet conditions and whenUnmet='skip'. - -- NEW: Also decrement remaining_deps on dependents and set initial_tasks=0 for map dependents. + -- Also decrement remaining_deps on dependents and set initial_tasks=0 for map dependents. 
WITH steps_with_conditions AS ( SELECT step_state.flow_slug, step_state.step_slug, step.condition_pattern, + step.condition_not_pattern, step.when_unmet, step.deps_count, step.step_index @@ -130,7 +140,7 @@ BEGIN WHERE step_state.run_id = cascade_resolve_conditions.run_id AND step_state.status = 'created' AND step_state.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) ), step_deps_output AS ( SELECT @@ -148,10 +158,13 @@ BEGIN condition_evaluations AS ( SELECT swc.*, - CASE - WHEN swc.deps_count = 0 THEN v_run_input @> swc.condition_pattern - ELSE COALESCE(sdo.deps_output, '{}'::jsonb) @> swc.condition_pattern - END AS condition_met + -- condition_met = (if IS NULL OR input @> if) AND (ifNot IS NULL OR NOT(input @> ifNot)) + (swc.condition_pattern IS NULL OR + CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_pattern) + AND + (swc.condition_not_pattern IS NULL OR + NOT (CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_not_pattern)) + AS condition_met FROM steps_with_conditions swc LEFT JOIN step_deps_output sdo ON sdo.step_slug = swc.step_slug ), @@ -231,13 +244,15 @@ BEGIN WHERE ready_step.run_id = cascade_resolve_conditions.run_id AND ready_step.status = 'created' AND ready_step.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) AND step.when_unmet = 'skip-cascade' + -- Condition is NOT met when: (if fails) OR (ifNot fails) AND NOT ( - CASE - WHEN step.deps_count = 0 THEN v_run_input @> step.condition_pattern - ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) @> step.condition_pattern - END + (step.condition_pattern IS NULL OR + CASE WHEN step.deps_count = 0 THEN v_run_input ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) END @> step.condition_pattern) + AND + (step.condition_not_pattern IS NULL OR + NOT (CASE WHEN step.deps_count = 0 THEN v_run_input ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) END @> step.condition_not_pattern)) ) ORDER BY step.step_index; diff --git a/pkgs/core/src/database-types.ts b/pkgs/core/src/database-types.ts index a89789c36..1d2966461 100644 --- a/pkgs/core/src/database-types.ts +++ b/pkgs/core/src/database-types.ts @@ -278,6 +278,7 @@ export type Database = { } steps: { Row: { + condition_not_pattern: Json | null condition_pattern: Json | null created_at: string deps_count: number @@ -293,6 +294,7 @@ export type Database = { when_unmet: string } Insert: { + condition_not_pattern?: Json | null condition_pattern?: Json | null created_at?: string deps_count?: number @@ -308,6 +310,7 @@ export type Database = { when_unmet?: string } Update: { + condition_not_pattern?: Json | null condition_pattern?: Json | null created_at?: string deps_count?: number @@ -410,6 +413,7 @@ export type Database = { add_step: { Args: { base_delay?: number + condition_not_pattern?: Json condition_pattern?: Json deps_slugs?: string[] flow_slug: string @@ -422,6 +426,7 @@ export type Database = { when_unmet?: string } Returns: { + condition_not_pattern: Json | null condition_pattern: Json | null created_at: string deps_count: number diff --git a/pkgs/core/supabase/migrations/20260105214940_pgflow_step_conditions.sql b/pkgs/core/supabase/migrations/20260108131350_pgflow_step_conditions.sql similarity index 95% rename from 
pkgs/core/supabase/migrations/20260105214940_pgflow_step_conditions.sql rename to pkgs/core/supabase/migrations/20260108131350_pgflow_step_conditions.sql index b28e8044b..840c2d134 100644 --- a/pkgs/core/supabase/migrations/20260105214940_pgflow_step_conditions.sql +++ b/pkgs/core/supabase/migrations/20260108131350_pgflow_step_conditions.sql @@ -15,7 +15,7 @@ END) <= 1), ADD CONSTRAINT "skip_reason_matches_status" CHECK (((status = 'skipp -- Create index "idx_step_states_skipped" to table: "step_states" CREATE INDEX "idx_step_states_skipped" ON "pgflow"."step_states" ("run_id", "step_slug") WHERE (status = 'skipped'::text); -- Modify "steps" table -ALTER TABLE "pgflow"."steps" ADD CONSTRAINT "when_failed_is_valid" CHECK (when_failed = ANY (ARRAY['fail'::text, 'skip'::text, 'skip-cascade'::text])), ADD CONSTRAINT "when_unmet_is_valid" CHECK (when_unmet = ANY (ARRAY['fail'::text, 'skip'::text, 'skip-cascade'::text])), ADD COLUMN "condition_pattern" jsonb NULL, ADD COLUMN "when_unmet" text NOT NULL DEFAULT 'skip', ADD COLUMN "when_failed" text NOT NULL DEFAULT 'fail'; +ALTER TABLE "pgflow"."steps" ADD CONSTRAINT "when_failed_is_valid" CHECK (when_failed = ANY (ARRAY['fail'::text, 'skip'::text, 'skip-cascade'::text])), ADD CONSTRAINT "when_unmet_is_valid" CHECK (when_unmet = ANY (ARRAY['fail'::text, 'skip'::text, 'skip-cascade'::text])), ADD COLUMN "condition_pattern" jsonb NULL, ADD COLUMN "condition_not_pattern" jsonb NULL, ADD COLUMN "when_unmet" text NOT NULL DEFAULT 'skip', ADD COLUMN "when_failed" text NOT NULL DEFAULT 'fail'; -- Create "_cascade_force_skip_steps" function CREATE FUNCTION "pgflow"."_cascade_force_skip_steps" ("run_id" uuid, "step_slug" text, "skip_reason" text) RETURNS integer LANGUAGE plpgsql AS $$ DECLARE @@ -151,11 +151,15 @@ BEGIN -- PHASE 1a: CHECK FOR FAIL CONDITIONS -- ========================================== -- Find first step (by topological order) with unmet condition and 'fail' mode. 
+ -- Condition is unmet when: + -- (condition_pattern is set AND input does NOT contain it) OR + -- (condition_not_pattern is set AND input DOES contain it) WITH steps_with_conditions AS ( SELECT step_state.flow_slug, step_state.step_slug, step.condition_pattern, + step.condition_not_pattern, step.when_unmet, step.deps_count, step.step_index @@ -166,7 +170,7 @@ BEGIN WHERE step_state.run_id = cascade_resolve_conditions.run_id AND step_state.status = 'created' AND step_state.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) ), step_deps_output AS ( SELECT @@ -184,14 +188,17 @@ BEGIN condition_evaluations AS ( SELECT swc.*, - CASE - WHEN swc.deps_count = 0 THEN v_run_input @> swc.condition_pattern - ELSE COALESCE(sdo.deps_output, '{}'::jsonb) @> swc.condition_pattern - END AS condition_met + -- condition_met = (if IS NULL OR input @> if) AND (ifNot IS NULL OR NOT(input @> ifNot)) + (swc.condition_pattern IS NULL OR + CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_pattern) + AND + (swc.condition_not_pattern IS NULL OR + NOT (CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_not_pattern)) + AS condition_met FROM steps_with_conditions swc LEFT JOIN step_deps_output sdo ON sdo.step_slug = swc.step_slug ) - SELECT flow_slug, step_slug, condition_pattern + SELECT flow_slug, step_slug, condition_pattern, condition_not_pattern INTO v_first_fail FROM condition_evaluations WHERE NOT condition_met AND when_unmet = 'fail' @@ -199,11 +206,13 @@ BEGIN LIMIT 1; -- Handle fail mode: fail step and run, return false - IF v_first_fail IS NOT NULL THEN + -- Note: Cannot use "v_first_fail IS NOT NULL" because records with NULL fields + -- evaluate to NULL in IS NOT NULL checks. Use FOUND instead. + IF FOUND THEN UPDATE pgflow.step_states SET status = 'failed', failed_at = now(), - error_message = 'Condition not met: ' || v_first_fail.condition_pattern::text + error_message = 'Condition not met' WHERE pgflow.step_states.run_id = cascade_resolve_conditions.run_id AND pgflow.step_states.step_slug = v_first_fail.step_slug; @@ -219,12 +228,13 @@ BEGIN -- PHASE 1b: HANDLE SKIP CONDITIONS (with propagation) -- ========================================== -- Skip steps with unmet conditions and whenUnmet='skip'. - -- NEW: Also decrement remaining_deps on dependents and set initial_tasks=0 for map dependents. + -- Also decrement remaining_deps on dependents and set initial_tasks=0 for map dependents. 
WITH steps_with_conditions AS ( SELECT step_state.flow_slug, step_state.step_slug, step.condition_pattern, + step.condition_not_pattern, step.when_unmet, step.deps_count, step.step_index @@ -235,7 +245,7 @@ BEGIN WHERE step_state.run_id = cascade_resolve_conditions.run_id AND step_state.status = 'created' AND step_state.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) ), step_deps_output AS ( SELECT @@ -253,10 +263,13 @@ BEGIN condition_evaluations AS ( SELECT swc.*, - CASE - WHEN swc.deps_count = 0 THEN v_run_input @> swc.condition_pattern - ELSE COALESCE(sdo.deps_output, '{}'::jsonb) @> swc.condition_pattern - END AS condition_met + -- condition_met = (if IS NULL OR input @> if) AND (ifNot IS NULL OR NOT(input @> ifNot)) + (swc.condition_pattern IS NULL OR + CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_pattern) + AND + (swc.condition_not_pattern IS NULL OR + NOT (CASE WHEN swc.deps_count = 0 THEN v_run_input ELSE COALESCE(sdo.deps_output, '{}'::jsonb) END @> swc.condition_not_pattern)) + AS condition_met FROM steps_with_conditions swc LEFT JOIN step_deps_output sdo ON sdo.step_slug = swc.step_slug ), @@ -336,13 +349,15 @@ BEGIN WHERE ready_step.run_id = cascade_resolve_conditions.run_id AND ready_step.status = 'created' AND ready_step.remaining_deps = 0 - AND step.condition_pattern IS NOT NULL + AND (step.condition_pattern IS NOT NULL OR step.condition_not_pattern IS NOT NULL) AND step.when_unmet = 'skip-cascade' + -- Condition is NOT met when: (if fails) OR (ifNot fails) AND NOT ( - CASE - WHEN step.deps_count = 0 THEN v_run_input @> step.condition_pattern - ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) @> step.condition_pattern - END + (step.condition_pattern IS NULL OR + CASE WHEN step.deps_count = 0 THEN v_run_input ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) END @> step.condition_pattern) + AND + (step.condition_not_pattern IS NULL OR + NOT (CASE WHEN step.deps_count = 0 THEN v_run_input ELSE COALESCE(agg_deps.deps_output, '{}'::jsonb) END @> step.condition_not_pattern)) ) ORDER BY step.step_index; @@ -1440,7 +1455,7 @@ with tasks as ( dep_out.step_slug = st.step_slug $$; -- Create "add_step" function -CREATE FUNCTION "pgflow"."add_step" ("flow_slug" text, "step_slug" text, "deps_slugs" text[] DEFAULT '{}', "max_attempts" integer DEFAULT NULL::integer, "base_delay" integer DEFAULT NULL::integer, "timeout" integer DEFAULT NULL::integer, "start_delay" integer DEFAULT NULL::integer, "step_type" text DEFAULT 'single', "condition_pattern" jsonb DEFAULT NULL::jsonb, "when_unmet" text DEFAULT 'skip', "when_failed" text DEFAULT 'fail') RETURNS "pgflow"."steps" LANGUAGE plpgsql SET "search_path" = '' AS $$ +CREATE FUNCTION "pgflow"."add_step" ("flow_slug" text, "step_slug" text, "deps_slugs" text[] DEFAULT '{}', "max_attempts" integer DEFAULT NULL::integer, "base_delay" integer DEFAULT NULL::integer, "timeout" integer DEFAULT NULL::integer, "start_delay" integer DEFAULT NULL::integer, "step_type" text DEFAULT 'single', "condition_pattern" jsonb DEFAULT NULL::jsonb, "condition_not_pattern" jsonb DEFAULT NULL::jsonb, "when_unmet" text DEFAULT 'skip', "when_failed" text DEFAULT 'fail') RETURNS "pgflow"."steps" LANGUAGE plpgsql SET "search_path" = '' AS $$ DECLARE result_step pgflow.steps; next_idx int; @@ -1465,7 +1480,7 @@ BEGIN INSERT INTO pgflow.steps ( flow_slug, step_slug, step_type, step_index, deps_count, 
opt_max_attempts, opt_base_delay, opt_timeout, opt_start_delay, - condition_pattern, when_unmet, when_failed + condition_pattern, condition_not_pattern, when_unmet, when_failed ) VALUES ( add_step.flow_slug, @@ -1478,6 +1493,7 @@ BEGIN add_step.timeout, add_step.start_delay, add_step.condition_pattern, + add_step.condition_not_pattern, add_step.when_unmet, add_step.when_failed ) diff --git a/pkgs/core/supabase/migrations/atlas.sum b/pkgs/core/supabase/migrations/atlas.sum index ebb8f266a..5151880b0 100644 --- a/pkgs/core/supabase/migrations/atlas.sum +++ b/pkgs/core/supabase/migrations/atlas.sum @@ -1,4 +1,4 @@ -h1:YiBO80ZA6oQ84E10ZabIvo3OS/XglHkEmBn1Rp5Iay4= +h1:UUZln51my4XRIQECtp1HayMW7tGjk5w8qLQhW0x7gEY= 20250429164909_pgflow_initial.sql h1:I3n/tQIg5Q5nLg7RDoU3BzqHvFVjmumQxVNbXTPG15s= 20250517072017_pgflow_fix_poll_for_tasks_to_use_separate_statement_for_polling.sql h1:wTuXuwMxVniCr3ONCpodpVWJcHktoQZIbqMZ3sUHKMY= 20250609105135_pgflow_add_start_tasks_and_started_status.sql h1:ggGanW4Wyt8Kv6TWjnZ00/qVb3sm+/eFVDjGfT8qyPg= @@ -16,4 +16,4 @@ h1:YiBO80ZA6oQ84E10ZabIvo3OS/XglHkEmBn1Rp5Iay4= 20251212100113_pgflow_allow_data_loss_parameter.sql h1:Fg3RHj51STNHS4epQ2J4AFMj7NwG0XfyDTSA/9dcBIQ= 20251225163110_pgflow_add_flow_input_column.sql h1:734uCbTgKmPhTK3TY56uNYZ31T8u59yll9ea7nwtEoc= 20260103145141_pgflow_step_output_storage.sql h1:mgVHSFDLdtYy//SZ6C03j9Str1iS9xCM8Rz/wyFwn3o= -20260105214940_pgflow_step_conditions.sql h1:DIta8qrr+qRvA9aFCdWefk72qp27mcPvGGlAJswmitw= +20260108131350_pgflow_step_conditions.sql h1:7YMszmTlExOtx9PyYLB7hIc3RiMmtB4ZOc2EOQVfuPs= diff --git a/pkgs/core/supabase/tests/add_step/condition_not_pattern.test.sql b/pkgs/core/supabase/tests/add_step/condition_not_pattern.test.sql new file mode 100644 index 000000000..a1ae628a4 --- /dev/null +++ b/pkgs/core/supabase/tests/add_step/condition_not_pattern.test.sql @@ -0,0 +1,102 @@ +-- Test: add_step - condition_not_pattern parameter +-- Verifies the ifNot pattern (condition_not_pattern) is stored correctly +begin; +select plan(6); + +select pgflow_tests.reset_db(); +select pgflow.create_flow('ifnot_test'); + +-- Test 1: Add step with condition_not_pattern only +select pgflow.add_step( + 'ifnot_test', + 'step_with_ifnot', + condition_not_pattern => '{"role": "admin"}'::jsonb +); + +select is( + (select condition_not_pattern from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'step_with_ifnot'), + '{"role": "admin"}'::jsonb, + 'condition_not_pattern should be stored correctly' +); + +-- Test 2: Default condition_not_pattern should be NULL +select pgflow.add_step('ifnot_test', 'step_default_not'); + +select is( + (select condition_not_pattern from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'step_default_not'), + NULL::jsonb, + 'Default condition_not_pattern should be NULL' +); + +-- Test 3: Both condition_pattern and condition_not_pattern together +select pgflow.add_step( + 'ifnot_test', + 'step_with_both', + condition_pattern => '{"active": true}'::jsonb, + condition_not_pattern => '{"suspended": true}'::jsonb +); + +select ok( + (select + condition_pattern = '{"active": true}'::jsonb + AND condition_not_pattern = '{"suspended": true}'::jsonb + from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'step_with_both'), + 'Both condition_pattern and condition_not_pattern should be stored together' +); + +-- Test 4: condition_not_pattern with all other options +select pgflow.add_step( + 'ifnot_test', + 'step_all_options', + max_attempts => 5, + timeout => 30, + condition_not_pattern => 
'{"status": "disabled"}'::jsonb, + when_unmet => 'skip' +); + +select ok( + (select + opt_max_attempts = 5 + AND opt_timeout = 30 + AND condition_not_pattern = '{"status": "disabled"}'::jsonb + AND when_unmet = 'skip' + from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'step_all_options'), + 'condition_not_pattern should work with all other step options' +); + +-- Test 5: Complex nested condition_not_pattern +select pgflow.add_step( + 'ifnot_test', + 'step_nested_not', + condition_not_pattern => '{"user": {"role": "admin", "department": "IT"}}'::jsonb +); + +select is( + (select condition_not_pattern from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'step_nested_not'), + '{"user": {"role": "admin", "department": "IT"}}'::jsonb, + 'Nested condition_not_pattern should be stored correctly' +); + +-- Test 6: condition_not_pattern on dependent step +select pgflow.add_step('ifnot_test', 'first_step'); +select pgflow.add_step( + 'ifnot_test', + 'dependent_step', + deps_slugs => ARRAY['first_step'], + condition_not_pattern => '{"first_step": {"error": true}}'::jsonb +); + +select is( + (select condition_not_pattern from pgflow.steps + where flow_slug = 'ifnot_test' and step_slug = 'dependent_step'), + '{"first_step": {"error": true}}'::jsonb, + 'condition_not_pattern should be stored for dependent step' +); + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/branching_opposite_conditions.test.sql b/pkgs/core/supabase/tests/condition_evaluation/branching_opposite_conditions.test.sql new file mode 100644 index 000000000..77b65b36d --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/branching_opposite_conditions.test.sql @@ -0,0 +1,136 @@ +-- Test: Branching pattern - two steps with opposite conditions +-- For any input, exactly ONE step runs (mutual exclusion) +-- step1: if: { role: 'admin' } whenUnmet: 'skip' +-- step2: ifNot: { role: 'admin' } whenUnmet: 'skip' +begin; +select plan(9); + +select pgflow_tests.reset_db(); + +-- Create flow with two mutually exclusive branches +select pgflow.create_flow('branch_flow'); +-- Admin branch: only runs when role=admin +select pgflow.add_step( + flow_slug => 'branch_flow', + step_slug => 'admin_branch', + condition_pattern => '{"role": "admin"}'::jsonb, -- if: role=admin + when_unmet => 'skip' +); +-- Regular branch: only runs when role!=admin +select pgflow.add_step( + flow_slug => 'branch_flow', + step_slug => 'regular_branch', + condition_not_pattern => '{"role": "admin"}'::jsonb, -- ifNot: role=admin + when_unmet => 'skip' +); + +-- Test case 1: Admin user -> admin_branch runs, regular_branch skipped +with flow as ( + select * from pgflow.start_flow('branch_flow', '{"role": "admin", "name": "Alice"}'::jsonb) +) + +select run_id into temporary run1 from flow; + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run1) and step_slug = 'admin_branch' + ), + 'started', + 'Admin user: admin_branch should start' +); + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run1) and step_slug = 'regular_branch' + ), + 'skipped', + 'Admin user: regular_branch should be skipped' +); + +-- Verify exactly one step started for admin user +select is( + ( + select count(*)::int from pgflow.step_states + where run_id = (select run_id from run1) and status = 'started' + ), + 1, + 'Admin user: exactly one step should start' +); + +-- Test case 2: Regular user -> admin_branch skipped, regular_branch runs 
+with flow as ( + select * from pgflow.start_flow('branch_flow', '{"role": "user", "name": "Bob"}'::jsonb) +) + +select run_id into temporary run2 from flow; + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run2) and step_slug = 'admin_branch' + ), + 'skipped', + 'Regular user: admin_branch should be skipped' +); + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run2) and step_slug = 'regular_branch' + ), + 'started', + 'Regular user: regular_branch should start' +); + +-- Verify exactly one step started for regular user +select is( + ( + select count(*)::int from pgflow.step_states + where run_id = (select run_id from run2) and status = 'started' + ), + 1, + 'Regular user: exactly one step should start' +); + +-- Test case 3: No role field -> admin_branch skipped, regular_branch runs +-- (Missing field means input does NOT contain role=admin) +with flow as ( + select * from pgflow.start_flow('branch_flow', '{"name": "Charlie"}'::jsonb) +) + +select run_id into temporary run3 from flow; + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run3) and step_slug = 'admin_branch' + ), + 'skipped', + 'No role: admin_branch should be skipped (pattern not matched)' +); + +select is( + ( + select status from pgflow.step_states + where run_id = (select run_id from run3) and step_slug = 'regular_branch' + ), + 'started', + 'No role: regular_branch should start (pattern not matched = ifNot passes)' +); + +-- Verify exactly one step started for no-role user +select is( + ( + select count(*)::int from pgflow.step_states + where run_id = (select run_id from run3) and status = 'started' + ), + 1, + 'No role: exactly one step should start' +); + +drop table if exists run1, run2, run3; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/combined_if_and_ifnot.test.sql b/pkgs/core/supabase/tests/condition_evaluation/combined_if_and_ifnot.test.sql new file mode 100644 index 000000000..fe659bd1d --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/combined_if_and_ifnot.test.sql @@ -0,0 +1,92 @@ +-- Test: Combined if+ifNot - BOTH conditions must pass (AND semantics) +-- Pattern: "active admin who is NOT suspended" +-- if: { role: 'admin', active: true } +-- ifNot: { suspended: true } +begin; +select plan(6); + +select pgflow_tests.reset_db(); + +-- Create flow with step that has both if and ifNot conditions +select pgflow.create_flow('combined_flow'); +select pgflow.add_step( + flow_slug => 'combined_flow', + step_slug => 'admin_action', + condition_pattern => '{"role": "admin", "active": true}'::jsonb, -- if + condition_not_pattern => '{"suspended": true}'::jsonb, -- ifNot + when_unmet => 'skip' +); +-- Add another step without conditions +select pgflow.add_step('combined_flow', 'always_step'); + +-- Test case 1: Active admin NOT suspended -> BOTH conditions met -> step runs +with flow as ( + select * from pgflow.start_flow('combined_flow', '{"role": "admin", "active": true}'::jsonb) +) +select run_id into temporary run1 from flow; + +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run1) and step_slug = 'admin_action'), + 'started', + 'Active admin not suspended: both conditions met, step should start' +); + +-- Test case 2: Active admin BUT suspended -> if passes, ifNot fails -> step skipped +with flow as ( + select * from pgflow.start_flow('combined_flow', '{"role": "admin", "active": 
true, "suspended": true}'::jsonb) +) +select run_id into temporary run2 from flow; + +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run2) and step_slug = 'admin_action'), + 'skipped', + 'Active admin but suspended: ifNot fails, step should be skipped' +); + +select is( + (select skip_reason from pgflow.step_states + where run_id = (select run_id from run2) and step_slug = 'admin_action'), + 'condition_unmet', + 'Skip reason should be condition_unmet' +); + +-- Test case 3: Regular user NOT suspended -> if fails -> step skipped +with flow as ( + select * from pgflow.start_flow('combined_flow', '{"role": "user", "active": true}'::jsonb) +) +select run_id into temporary run3 from flow; + +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run3) and step_slug = 'admin_action'), + 'skipped', + 'Regular user: if condition fails, step should be skipped' +); + +-- Test case 4: Inactive admin -> if fails -> step skipped +with flow as ( + select * from pgflow.start_flow('combined_flow', '{"role": "admin", "active": false}'::jsonb) +) +select run_id into temporary run4 from flow; + +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run4) and step_slug = 'admin_action'), + 'skipped', + 'Inactive admin: if condition fails (active!=true), step should be skipped' +); + +-- Test case 5: always_step should run in all cases (checking last run) +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run4) and step_slug = 'always_step'), + 'started', + 'Step without condition should always start' +); + +drop table if exists run1, run2, run3, run4; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_met.test.sql b/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_met.test.sql index a0b1f4538..3cd064e52 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_met.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_met.test.sql @@ -11,13 +11,11 @@ select pgflow_tests.reset_db(); select pgflow.create_flow('conditional_flow'); select pgflow.add_step('conditional_flow', 'first'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{first}', -- depends on first - null, null, null, null, -- default options - 'single', -- step_type - '{"first": {"success": true}}'::jsonb, -- if: first.success must be true - 'skip' -- when_unmet + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + deps_slugs => ARRAY['first'], + condition_pattern => '{"first": {"success": true}}'::jsonb, -- first.success must be true + when_unmet => 'skip' ); -- Start flow diff --git a/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_unmet_skip.test.sql b/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_unmet_skip.test.sql index be8c2b4ba..93ca1f3ce 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_unmet_skip.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/dependent_step_condition_unmet_skip.test.sql @@ -11,13 +11,11 @@ select pgflow_tests.reset_db(); select pgflow.create_flow('conditional_flow'); select pgflow.add_step('conditional_flow', 'first'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{first}', -- depends on first - null, null, null, null, -- default options - 'single', -- step_type - '{"first": {"success": 
true}}'::jsonb, -- if: first.success must be true - 'skip' -- when_unmet + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + deps_slugs => ARRAY['first'], + condition_pattern => '{"first": {"success": true}}'::jsonb, -- first.success must be true + when_unmet => 'skip' ); -- Start flow diff --git a/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_matches_fail.test.sql b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_matches_fail.test.sql new file mode 100644 index 000000000..1efbd33d6 --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_matches_fail.test.sql @@ -0,0 +1,59 @@ +-- Test: ifNot pattern MATCHES (negative condition fails) with whenUnmet='fail' +-- When ifNot pattern MATCHES the input, the condition is NOT met (pattern should NOT match) +-- With whenUnmet='fail', this should fail the step and run +begin; +select plan(4); + +select pgflow_tests.reset_db(); + +-- Create flow with a root step that has ifNot condition +select pgflow.create_flow('ifnot_fail_flow'); +select pgflow.add_step( + flow_slug => 'ifnot_fail_flow', + step_slug => 'no_admin_step', + condition_not_pattern => '{"role": "admin"}'::jsonb, -- must NOT contain role=admin + when_unmet => 'fail' +); + +-- Start flow with input that MATCHES the ifNot pattern (role=admin) +-- Since input @> pattern, the ifNot condition is NOT met +with flow as ( + select * from pgflow.start_flow('ifnot_fail_flow', '{"role": "admin", "name": "Alice"}'::jsonb) +) +select run_id into temporary run_ids from flow; + +-- Test 1: Step should be 'failed' (ifNot condition not met because pattern matched) +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'failed', + 'Step with matched ifNot pattern and whenUnmet=fail should be failed' +); + +-- Test 2: Error message should indicate condition not met +select is( + (select error_message from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'Condition not met', + 'Error message should indicate condition not met' +); + +-- Test 3: No task should be created for failed step +select is( + (select count(*)::int from pgflow.step_tasks + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 0, + 'No task should be created for failed step' +); + +-- Test 4: Run should be 'failed' +select is( + (select status from pgflow.runs where run_id = (select run_id from run_ids)), + 'failed', + 'Run should be failed when step fails due to unmet ifNot condition' +); + +drop table if exists run_ids; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_not_matches.test.sql b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_not_matches.test.sql new file mode 100644 index 000000000..64dcd5695 --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_pattern_not_matches.test.sql @@ -0,0 +1,51 @@ +-- Test: ifNot pattern does NOT match - step should execute +-- When ifNot pattern does NOT match the input, the condition IS met +-- The step should execute normally +begin; +select plan(3); + +select pgflow_tests.reset_db(); + +-- Create flow with a root step that has ifNot condition +select pgflow.create_flow('ifnot_pass_flow'); +select pgflow.add_step( + flow_slug => 'ifnot_pass_flow', + step_slug => 'no_admin_step', + condition_not_pattern => '{"role": 
"admin"}'::jsonb, -- must NOT contain role=admin + when_unmet => 'fail' -- (doesn't matter for this test since condition is met) +); + +-- Start flow with input that does NOT match the ifNot pattern (role=user) +-- Since input does NOT contain role=admin, the ifNot condition IS met +with flow as ( + select * from pgflow.start_flow('ifnot_pass_flow', '{"role": "user", "name": "Bob"}'::jsonb) +) +select run_id into temporary run_ids from flow; + +-- Test 1: Step should be 'started' (condition met, step executes) +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'started', + 'Step should start when ifNot pattern does not match input' +); + +-- Test 2: Task should be created for the step +select is( + (select count(*)::int from pgflow.step_tasks + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 1, + 'Task should be created for step when condition is met' +); + +-- Test 3: Run should be 'started' +select is( + (select status from pgflow.runs where run_id = (select run_id from run_ids)), + 'started', + 'Run should continue when ifNot condition is met' +); + +drop table if exists run_ids; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip.test.sql b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip.test.sql new file mode 100644 index 000000000..e7a3dc7b7 --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip.test.sql @@ -0,0 +1,68 @@ +-- Test: ifNot pattern MATCHES (condition not met) with whenUnmet='skip' +-- Step should be skipped but run continues +begin; +select plan(5); + +select pgflow_tests.reset_db(); + +-- Create flow with a root step that has ifNot condition +select pgflow.create_flow('ifnot_skip_flow'); +select pgflow.add_step( + flow_slug => 'ifnot_skip_flow', + step_slug => 'no_admin_step', + condition_not_pattern => '{"role": "admin"}'::jsonb, -- must NOT contain role=admin + when_unmet => 'skip' +); +-- Add another root step without condition +select pgflow.add_step('ifnot_skip_flow', 'other_step'); + +-- Start flow with input that MATCHES the ifNot pattern (role=admin) +-- The ifNot condition is NOT met, so step should be skipped +with flow as ( + select * from pgflow.start_flow('ifnot_skip_flow', '{"role": "admin", "name": "Alice"}'::jsonb) +) +select run_id into temporary run_ids from flow; + +-- Test 1: no_admin_step should be 'skipped' +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'skipped', + 'Step with matched ifNot pattern and whenUnmet=skip should be skipped' +); + +-- Test 2: skip_reason should be 'condition_unmet' +select is( + (select skip_reason from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'condition_unmet', + 'Skip reason should be condition_unmet' +); + +-- Test 3: No task should be created for skipped step +select is( + (select count(*)::int from pgflow.step_tasks + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 0, + 'No task should be created for skipped step' +); + +-- Test 4: other_step should be started normally +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'other_step'), + 'started', + 'Other step without condition should start normally' +); + +-- Test 5: Run should continue (not failed) 
+select is( + (select status from pgflow.runs where run_id = (select run_id from run_ids)), + 'started', + 'Run should continue when step is skipped' +); + +drop table if exists run_ids; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip_cascade.test.sql b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip_cascade.test.sql new file mode 100644 index 000000000..cb51fa66a --- /dev/null +++ b/pkgs/core/supabase/tests/condition_evaluation/ifnot_root_step_skip_cascade.test.sql @@ -0,0 +1,77 @@ +-- Test: ifNot pattern MATCHES (condition not met) with whenUnmet='skip-cascade' +-- Step and all dependents should be skipped +begin; +select plan(6); + +select pgflow_tests.reset_db(); + +-- Create flow with ifNot step and a dependent +select pgflow.create_flow('ifnot_cascade_flow'); +select pgflow.add_step( + flow_slug => 'ifnot_cascade_flow', + step_slug => 'no_admin_step', + condition_not_pattern => '{"role": "admin"}'::jsonb, -- must NOT contain role=admin + when_unmet => 'skip-cascade' +); +-- Add a dependent step +select pgflow.add_step('ifnot_cascade_flow', 'dependent_step', ARRAY['no_admin_step']); +-- Add an independent step +select pgflow.add_step('ifnot_cascade_flow', 'independent_step'); + +-- Start flow with input that MATCHES the ifNot pattern (role=admin) +with flow as ( + select * from pgflow.start_flow('ifnot_cascade_flow', '{"role": "admin", "name": "Alice"}'::jsonb) +) +select run_id into temporary run_ids from flow; + +-- Test 1: no_admin_step should be 'skipped' +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'skipped', + 'Step with matched ifNot pattern and whenUnmet=skip-cascade should be skipped' +); + +-- Test 2: skip_reason for no_admin_step should be 'condition_unmet' +select is( + (select skip_reason from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'no_admin_step'), + 'condition_unmet', + 'Skip reason should be condition_unmet' +); + +-- Test 3: dependent_step should also be 'skipped' (cascade) +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'dependent_step'), + 'skipped', + 'Dependent step should be skipped due to cascade' +); + +-- Test 4: skip_reason for dependent_step should be 'dependency_skipped' +select is( + (select skip_reason from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'dependent_step'), + 'dependency_skipped', + 'Dependent skip reason should be dependency_skipped' +); + +-- Test 5: independent_step should be started normally +select is( + (select status from pgflow.step_states + where run_id = (select run_id from run_ids) and step_slug = 'independent_step'), + 'started', + 'Independent step should start normally' +); + +-- Test 6: Run should continue +select is( + (select status from pgflow.runs where run_id = (select run_id from run_ids)), + 'started', + 'Run should continue when step is skip-cascaded' +); + +drop table if exists run_ids; + +select finish(); +rollback; diff --git a/pkgs/core/supabase/tests/condition_evaluation/plain_skip_iterates_until_convergence.test.sql b/pkgs/core/supabase/tests/condition_evaluation/plain_skip_iterates_until_convergence.test.sql index faea7f191..2681373e6 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/plain_skip_iterates_until_convergence.test.sql +++ 
b/pkgs/core/supabase/tests/condition_evaluation/plain_skip_iterates_until_convergence.test.sql @@ -20,29 +20,23 @@ select pgflow_tests.reset_db(); -- c has no condition select pgflow.create_flow('chain_skip'); select pgflow.add_step( - 'chain_skip', - 'step_a', - '{}', -- root step - null, null, null, null, - 'single', - '{"enabled": true}'::jsonb, -- if: requires enabled=true - 'skip' -- plain skip + flow_slug => 'chain_skip', + step_slug => 'step_a', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'skip' -- plain skip ); select pgflow.add_step( - 'chain_skip', - 'step_b', - '{step_a}', -- depends on a - null, null, null, null, - 'single', - '{"step_a": {"success": true}}'::jsonb, -- if: a.success must be true - 'skip' -- plain skip (won't be met since a was skipped) + flow_slug => 'chain_skip', + step_slug => 'step_b', + deps_slugs => ARRAY['step_a'], + condition_pattern => '{"step_a": {"success": true}}'::jsonb, -- a.success must be true + when_unmet => 'skip' -- plain skip (won't be met since a was skipped) ); select pgflow.add_step( - 'chain_skip', - 'step_c', - '{step_b}', -- depends on b - null, null, null, null, - 'single' -- no condition + flow_slug => 'chain_skip', + step_slug => 'step_c', + deps_slugs => ARRAY['step_b'] + -- no condition ); -- Start flow with input that does NOT match step_a's condition diff --git a/pkgs/core/supabase/tests/condition_evaluation/plain_skip_propagates_to_map.test.sql b/pkgs/core/supabase/tests/condition_evaluation/plain_skip_propagates_to_map.test.sql index cd850c1ce..241090b51 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/plain_skip_propagates_to_map.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/plain_skip_propagates_to_map.test.sql @@ -16,21 +16,17 @@ select pgflow_tests.reset_db(); -- producer (conditional, skip) -> map_consumer (map step) select pgflow.create_flow('skip_to_map'); select pgflow.add_step( - 'skip_to_map', - 'producer', - '{}', -- root step - null, null, null, null, -- default options - 'single', -- step_type - '{"enabled": true}'::jsonb, -- if: requires enabled=true - 'skip' -- when_unmet - plain skip (not skip-cascade) + flow_slug => 'skip_to_map', + step_slug => 'producer', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'skip' -- plain skip (not skip-cascade) ); -- Map consumer: no condition, just depends on producer select pgflow.add_step( - 'skip_to_map', - 'map_consumer', - '{producer}', -- depends on producer - null, null, null, null, -- default options - 'map' -- map step type (no condition_pattern or when_unmet needed) + flow_slug => 'skip_to_map', + step_slug => 'map_consumer', + deps_slugs => ARRAY['producer'], + step_type => 'map' ); -- Start flow with input that does NOT match producer's condition diff --git a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_met.test.sql b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_met.test.sql index d7c853548..b86a91f24 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_met.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_met.test.sql @@ -10,13 +10,10 @@ select pgflow_tests.reset_db(); -- Create flow with a root step that has a condition select pgflow.create_flow('conditional_flow'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{}', -- no deps (root step) - null, null, null, null, -- default options - 'single', -- step_type - '{"enabled": 
true}'::jsonb, -- condition_pattern: requires enabled=true - 'skip' -- when_unmet + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'skip' ); -- Start flow with input that matches condition diff --git a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_fail.test.sql b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_fail.test.sql index c12703a85..dca13a3e4 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_fail.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_fail.test.sql @@ -10,13 +10,10 @@ select pgflow_tests.reset_db(); -- Create flow with a root step that has a condition with fail mode select pgflow.create_flow('conditional_flow'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{}', -- no deps (root step) - null, null, null, null, -- default options - 'single', -- step_type - '{"enabled": true}'::jsonb, -- condition_pattern: requires enabled=true - 'fail' -- when_unmet - causes run to fail + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'fail' -- causes run to fail ); -- Start flow with input that does NOT match condition diff --git a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip.test.sql b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip.test.sql index aacdff2c2..b11364d35 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip.test.sql @@ -10,13 +10,10 @@ select pgflow_tests.reset_db(); -- Create flow with a root step that has a condition select pgflow.create_flow('conditional_flow'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{}', -- no deps (root step) - null, null, null, null, -- default options - 'single', -- step_type - '{"enabled": true}'::jsonb, -- condition_pattern: requires enabled=true - 'skip' -- when_unmet + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'skip' ); -- Add another root step without condition select pgflow.add_step('conditional_flow', 'other_step'); diff --git a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip_cascade.test.sql b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip_cascade.test.sql index 3cde76fc2..59201f043 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip_cascade.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/root_step_condition_unmet_skip_cascade.test.sql @@ -10,18 +10,15 @@ select pgflow_tests.reset_db(); -- Create flow with a root step that has a condition and a dependent select pgflow.create_flow('conditional_flow'); select pgflow.add_step( - 'conditional_flow', - 'checked_step', - '{}', -- no deps (root step) - null, null, null, null, -- default options - 'single', -- step_type - '{"enabled": true}'::jsonb, -- condition_pattern - 'skip-cascade' -- when_unmet - skip this AND dependents + flow_slug => 'conditional_flow', + step_slug => 'checked_step', + condition_pattern => '{"enabled": true}'::jsonb, + when_unmet => 'skip-cascade' -- skip this AND dependents ); select 
pgflow.add_step( - 'conditional_flow', - 'dependent_step', - '{checked_step}' -- depends on checked_step + flow_slug => 'conditional_flow', + step_slug => 'dependent_step', + deps_slugs => ARRAY['checked_step'] ); -- Add an independent root step that should still run select pgflow.add_step('conditional_flow', 'other_step'); diff --git a/pkgs/core/supabase/tests/condition_evaluation/skipped_deps_excluded_from_input.test.sql b/pkgs/core/supabase/tests/condition_evaluation/skipped_deps_excluded_from_input.test.sql index abcdd5bb1..d88eacfc1 100644 --- a/pkgs/core/supabase/tests/condition_evaluation/skipped_deps_excluded_from_input.test.sql +++ b/pkgs/core/supabase/tests/condition_evaluation/skipped_deps_excluded_from_input.test.sql @@ -22,25 +22,21 @@ select pgflow_tests.reset_db(); -- c depends on both select pgflow.create_flow('skip_diamond'); select pgflow.add_step( - 'skip_diamond', - 'step_a', - '{}', -- root step - null, null, null, null, - 'single', - '{"enabled": true}'::jsonb, -- if: requires enabled=true - 'skip' -- plain skip + flow_slug => 'skip_diamond', + step_slug => 'step_a', + condition_pattern => '{"enabled": true}'::jsonb, -- requires enabled=true + when_unmet => 'skip' -- plain skip ); select pgflow.add_step( - 'skip_diamond', - 'step_b', - '{}' -- root step, no condition + flow_slug => 'skip_diamond', + step_slug => 'step_b' + -- root step, no condition ); select pgflow.add_step( - 'skip_diamond', - 'step_c', - '{step_a, step_b}', -- depends on both - null, null, null, null, - 'single' -- no condition + flow_slug => 'skip_diamond', + step_slug => 'step_c', + deps_slugs => ARRAY['step_a', 'step_b'] + -- no condition ); -- Start flow with input that skips step_a diff --git a/pkgs/dsl/__tests__/runtime/condition-options.test.ts b/pkgs/dsl/__tests__/runtime/condition-options.test.ts index 4c52dfda7..445f36d43 100644 --- a/pkgs/dsl/__tests__/runtime/condition-options.test.ts +++ b/pkgs/dsl/__tests__/runtime/condition-options.test.ts @@ -3,43 +3,40 @@ import { Flow } from '../../src/dsl.js'; import { compileFlow } from '../../src/compile-flow.js'; describe('Condition Options', () => { - describe('DSL accepts if and else', () => { + describe('DSL accepts if and whenUnmet', () => { it('should accept if option on a step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'conditional_step', if: { enabled: true } }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'conditional_step', if: { enabled: true } }, + () => 'result' + ); const step = flow.getStepDefinition('conditional_step'); expect(step.options.if).toEqual({ enabled: true }); }); - it('should accept else option on a step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'conditional_step', else: 'skip' }, - () => 'result' - ); + it('should accept whenUnmet option on a step', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'conditional_step', whenUnmet: 'skip' }, + () => 'result' + ); const step = flow.getStepDefinition('conditional_step'); - expect(step.options.else).toBe('skip'); + expect(step.options.whenUnmet).toBe('skip'); }); - it('should accept both if and else together', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'conditional_step', - if: { status: 'active' }, - else: 'skip-cascade', - }, - () => 'result' - ); + it('should accept both if and whenUnmet together', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'conditional_step', + if: { status: 
'active' }, + whenUnmet: 'skip-cascade', + }, + () => 'result' + ); const step = flow.getStepDefinition('conditional_step'); expect(step.options.if).toEqual({ status: 'active' }); - expect(step.options.else).toBe('skip-cascade'); + expect(step.options.whenUnmet).toBe('skip-cascade'); }); it('should accept if on dependent steps', () => { @@ -50,37 +47,37 @@ describe('Condition Options', () => { slug: 'conditional_step', dependsOn: ['first'], if: { first: { success: true } }, - else: 'skip', + whenUnmet: 'skip', }, () => 'result' ); const step = flow.getStepDefinition('conditional_step'); expect(step.options.if).toEqual({ first: { success: true } }); - expect(step.options.else).toBe('skip'); + expect(step.options.whenUnmet).toBe('skip'); }); }); describe('compileFlow includes condition parameters', () => { it('should compile condition_pattern for root step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', if: { enabled: true } }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', if: { enabled: true } }, + () => 'result' + ); const statements = compileFlow(flow); expect(statements).toHaveLength(2); - expect(statements[1]).toContain("condition_pattern => '{\"enabled\":true}'"); + expect(statements[1]).toContain( + 'condition_pattern => \'{"enabled":true}\'' + ); }); it('should compile when_unmet for step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', else: 'fail' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', whenUnmet: 'fail' }, + () => 'result' + ); const statements = compileFlow(flow); @@ -89,42 +86,44 @@ describe('Condition Options', () => { }); it('should compile both condition_pattern and when_unmet together', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'step1', - if: { active: true, type: 'premium' }, - else: 'skip-cascade', - }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + if: { active: true, type: 'premium' }, + whenUnmet: 'skip-cascade', + }, + () => 'result' + ); const statements = compileFlow(flow); expect(statements).toHaveLength(2); - expect(statements[1]).toContain("condition_pattern => '{\"active\":true,\"type\":\"premium\"}'"); + expect(statements[1]).toContain( + 'condition_pattern => \'{"active":true,"type":"premium"}\'' + ); expect(statements[1]).toContain("when_unmet => 'skip-cascade'"); }); it('should compile step with all options including condition', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'step1', - maxAttempts: 3, - timeout: 60, - if: { enabled: true }, - else: 'skip', - }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + maxAttempts: 3, + timeout: 60, + if: { enabled: true }, + whenUnmet: 'skip', + }, + () => 'result' + ); const statements = compileFlow(flow); expect(statements).toHaveLength(2); expect(statements[1]).toContain('max_attempts => 3'); expect(statements[1]).toContain('timeout => 60'); - expect(statements[1]).toContain("condition_pattern => '{\"enabled\":true}'"); + expect(statements[1]).toContain( + 'condition_pattern => \'{"enabled":true}\'' + ); expect(statements[1]).toContain("when_unmet => 'skip'"); }); @@ -136,7 +135,7 @@ describe('Condition Options', () => { slug: 'second', dependsOn: ['first'], if: { first: { success: true } }, - else: 'skip', + whenUnmet: 'skip', }, () => 'result' ); @@ -145,34 +144,145 @@ 
describe('Condition Options', () => { expect(statements).toHaveLength(3); expect(statements[2]).toContain("ARRAY['first']"); - expect(statements[2]).toContain("condition_pattern => '{\"first\":{\"success\":true}}'"); + expect(statements[2]).toContain( + 'condition_pattern => \'{"first":{"success":true}}\'' + ); expect(statements[2]).toContain("when_unmet => 'skip'"); }); }); - describe('else validation', () => { - it('should only accept valid else values', () => { + describe('whenUnmet validation', () => { + it('should only accept valid whenUnmet values', () => { // Valid values should work expect(() => new Flow({ slug: 'test' }).step( - { slug: 's1', else: 'fail' }, + { slug: 's1', whenUnmet: 'fail' }, () => 1 ) ).not.toThrow(); expect(() => new Flow({ slug: 'test' }).step( - { slug: 's1', else: 'skip' }, + { slug: 's1', whenUnmet: 'skip' }, () => 1 ) ).not.toThrow(); expect(() => new Flow({ slug: 'test' }).step( - { slug: 's1', else: 'skip-cascade' }, + { slug: 's1', whenUnmet: 'skip-cascade' }, () => 1 ) ).not.toThrow(); }); }); + + describe('DSL accepts ifNot', () => { + it('should accept ifNot option on a step', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'no_admin_step', ifNot: { role: 'admin' } }, + () => 'result' + ); + + const step = flow.getStepDefinition('no_admin_step'); + expect(step.options.ifNot).toEqual({ role: 'admin' }); + }); + + it('should accept both if and ifNot together', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'admin_action', + if: { role: 'admin', active: true }, + ifNot: { suspended: true }, + whenUnmet: 'skip', + }, + () => 'result' + ); + + const step = flow.getStepDefinition('admin_action'); + expect(step.options.if).toEqual({ role: 'admin', active: true }); + expect(step.options.ifNot).toEqual({ suspended: true }); + expect(step.options.whenUnmet).toBe('skip'); + }); + + it('should accept ifNot on dependent steps', () => { + const flow = new Flow({ slug: 'test_flow' }) + .step({ slug: 'first' }, () => ({ error: false })) + .step( + { + slug: 'continue_step', + dependsOn: ['first'], + ifNot: { first: { error: true } }, + whenUnmet: 'skip', + }, + () => 'result' + ); + + const step = flow.getStepDefinition('continue_step'); + expect(step.options.ifNot).toEqual({ first: { error: true } }); + expect(step.options.whenUnmet).toBe('skip'); + }); + }); + + describe('compileFlow includes ifNot parameters', () => { + it('should compile condition_not_pattern for root step', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', ifNot: { role: 'admin' } }, + () => 'result' + ); + + const statements = compileFlow(flow); + + expect(statements).toHaveLength(2); + expect(statements[1]).toContain( + 'condition_not_pattern => \'{"role":"admin"}\'' + ); + }); + + it('should compile both if and ifNot together', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + if: { active: true }, + ifNot: { suspended: true }, + whenUnmet: 'skip', + }, + () => 'result' + ); + + const statements = compileFlow(flow); + + expect(statements).toHaveLength(2); + expect(statements[1]).toContain( + 'condition_pattern => \'{"active":true}\'' + ); + expect(statements[1]).toContain( + 'condition_not_pattern => \'{"suspended":true}\'' + ); + expect(statements[1]).toContain("when_unmet => 'skip'"); + }); + + it('should compile ifNot for dependent step', () => { + const flow = new Flow({ slug: 'test_flow' }) + .step({ slug: 'first' }, () => ({ error: false })) + .step( + { + slug: 'second', + 
dependsOn: ['first'], + ifNot: { first: { error: true } }, + whenUnmet: 'skip', + }, + () => 'result' + ); + + const statements = compileFlow(flow); + + expect(statements).toHaveLength(3); + expect(statements[2]).toContain("ARRAY['first']"); + expect(statements[2]).toContain( + 'condition_not_pattern => \'{"first":{"error":true}}\'' + ); + expect(statements[2]).toContain("when_unmet => 'skip'"); + }); + }); }); diff --git a/pkgs/dsl/__tests__/runtime/when-failed-options.test.ts b/pkgs/dsl/__tests__/runtime/when-failed-options.test.ts index 1691f7e93..db831fe94 100644 --- a/pkgs/dsl/__tests__/runtime/when-failed-options.test.ts +++ b/pkgs/dsl/__tests__/runtime/when-failed-options.test.ts @@ -5,33 +5,30 @@ import { compileFlow } from '../../src/compile-flow.js'; describe('retriesExhausted Options', () => { describe('DSL accepts retriesExhausted option', () => { it('should accept retriesExhausted option on a step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'skip' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'skip' }, + () => 'result' + ); const step = flow.getStepDefinition('step1'); expect(step.options.retriesExhausted).toBe('skip'); }); it('should accept retriesExhausted: fail (default behavior)', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'fail' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'fail' }, + () => 'result' + ); const step = flow.getStepDefinition('step1'); expect(step.options.retriesExhausted).toBe('fail'); }); it('should accept retriesExhausted: skip-cascade', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'skip-cascade' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'skip-cascade' }, + () => 'result' + ); const step = flow.getStepDefinition('step1'); expect(step.options.retriesExhausted).toBe('skip-cascade'); @@ -54,16 +51,15 @@ describe('retriesExhausted Options', () => { }); it('should accept retriesExhausted together with other options', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'step1', - maxAttempts: 3, - timeout: 60, - retriesExhausted: 'skip-cascade', - }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + maxAttempts: 3, + timeout: 60, + retriesExhausted: 'skip-cascade', + }, + () => 'result' + ); const step = flow.getStepDefinition('step1'); expect(step.options.maxAttempts).toBe(3); @@ -71,32 +67,30 @@ describe('retriesExhausted Options', () => { expect(step.options.retriesExhausted).toBe('skip-cascade'); }); - it('should accept both else and retriesExhausted together', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'step1', - if: { enabled: true }, - else: 'skip', - retriesExhausted: 'skip-cascade', - }, - () => 'result' - ); + it('should accept both whenUnmet and retriesExhausted together', () => { + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + if: { enabled: true }, + whenUnmet: 'skip', + retriesExhausted: 'skip-cascade', + }, + () => 'result' + ); const step = flow.getStepDefinition('step1'); expect(step.options.if).toEqual({ enabled: true }); - expect(step.options.else).toBe('skip'); + expect(step.options.whenUnmet).toBe('skip'); 
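For orientation (not part of the diff): a minimal sketch of how a step that combines `if`, `ifNot`, and `whenUnmet` flows through `compileFlow`, mirroring the assertions in the tests above. The flow/step slugs and import paths are illustrative.

```ts
// Sketch only: mirrors the expectations asserted in the surrounding tests.
import { Flow } from '../../src/dsl.js';
import { compileFlow } from '../../src/compile-flow.js';

const flow = new Flow({ slug: 'notify_flow' })
  .step({ slug: 'load_user' }, () => ({ role: 'member', suspended: false }))
  .step(
    {
      slug: 'notify',
      dependsOn: ['load_user'],
      if: { load_user: { role: 'member' } },     // deps output must contain this pattern
      ifNot: { load_user: { suspended: true } }, // ...and must NOT contain this one
      whenUnmet: 'skip',
    },
    () => 'sent'
  );

const statements = compileFlow(flow);
// Per the tests above, statements[2] (the add_step call for 'notify') contains:
//   condition_pattern => '{"load_user":{"role":"member"}}'
//   condition_not_pattern => '{"load_user":{"suspended":true}}'
//   when_unmet => 'skip'
console.log(statements[2]);
```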
expect(step.options.retriesExhausted).toBe('skip-cascade'); }); }); describe('compileFlow includes when_failed parameter', () => { it('should compile when_failed for step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'skip' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'skip' }, + () => 'result' + ); const statements = compileFlow(flow); @@ -105,11 +99,10 @@ describe('retriesExhausted Options', () => { }); it('should compile when_failed: fail', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'fail' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'fail' }, + () => 'result' + ); const statements = compileFlow(flow); @@ -118,11 +111,10 @@ describe('retriesExhausted Options', () => { }); it('should compile when_failed: skip-cascade', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1', retriesExhausted: 'skip-cascade' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1', retriesExhausted: 'skip-cascade' }, + () => 'result' + ); const statements = compileFlow(flow); @@ -131,35 +123,35 @@ describe('retriesExhausted Options', () => { }); it('should compile step with all options including retriesExhausted', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { - slug: 'step1', - maxAttempts: 3, - timeout: 60, - if: { enabled: true }, - else: 'skip', - retriesExhausted: 'skip-cascade', - }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'step1', + maxAttempts: 3, + timeout: 60, + if: { enabled: true }, + whenUnmet: 'skip', + retriesExhausted: 'skip-cascade', + }, + () => 'result' + ); const statements = compileFlow(flow); expect(statements).toHaveLength(2); expect(statements[1]).toContain('max_attempts => 3'); expect(statements[1]).toContain('timeout => 60'); - expect(statements[1]).toContain("condition_pattern => '{\"enabled\":true}'"); + expect(statements[1]).toContain( + 'condition_pattern => \'{"enabled":true}\'' + ); expect(statements[1]).toContain("when_unmet => 'skip'"); expect(statements[1]).toContain("when_failed => 'skip-cascade'"); }); it('should not include when_failed when not specified', () => { - const flow = new Flow({ slug: 'test_flow' }) - .step( - { slug: 'step1' }, - () => 'result' - ); + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'step1' }, + () => 'result' + ); const statements = compileFlow(flow); @@ -170,22 +162,20 @@ describe('retriesExhausted Options', () => { describe('retriesExhausted on map steps', () => { it('should accept retriesExhausted on map step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .map( - { slug: 'map_step', retriesExhausted: 'skip' }, - (item) => item.toUpperCase() - ); + const flow = new Flow({ slug: 'test_flow' }).map( + { slug: 'map_step', retriesExhausted: 'skip' }, + (item) => item.toUpperCase() + ); const step = flow.getStepDefinition('map_step'); expect(step.options.retriesExhausted).toBe('skip'); }); it('should compile when_failed for map step', () => { - const flow = new Flow({ slug: 'test_flow' }) - .map( - { slug: 'map_step', retriesExhausted: 'skip-cascade' }, - (item) => item.toUpperCase() - ); + const flow = new Flow({ slug: 'test_flow' }).map( + { slug: 'map_step', retriesExhausted: 'skip-cascade' }, + (item) => item.toUpperCase() + ); const 
statements = compileFlow(flow); diff --git a/pkgs/dsl/__tests__/types/condition-pattern.test-d.ts b/pkgs/dsl/__tests__/types/condition-pattern.test-d.ts index 684286dba..a19f0bfea 100644 --- a/pkgs/dsl/__tests__/types/condition-pattern.test-d.ts +++ b/pkgs/dsl/__tests__/types/condition-pattern.test-d.ts @@ -76,7 +76,9 @@ describe('ContainmentPattern utility type', () => { type Pattern = ContainmentPattern; // Should allow partial object patterns in array - expectTypeOf().toEqualTypeOf<{ type?: string; value?: number }[]>(); + expectTypeOf().toEqualTypeOf< + { type?: string; value?: number }[] + >(); }); it('should allow array pattern with specific elements', () => { @@ -317,3 +319,300 @@ describe('if option typing in step methods', () => { }); }); }); + +describe('ifNot option typing in step methods', () => { + describe('root step ifNot', () => { + it('should type ifNot as ContainmentPattern', () => { + type FlowInput = { userId: string; role: string }; + + // This should compile - valid partial pattern + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'check', ifNot: { role: 'admin' } }, + (input) => input.userId + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should reject invalid keys in ifNot', () => { + type FlowInput = { userId: string; role: string }; + + // @ts-expect-error - 'invalidKey' does not exist on FlowInput + new Flow({ slug: 'test_flow' }).step( + { slug: 'check', ifNot: { invalidKey: 'value' } }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (input: any) => input.userId + ); + }); + + it('should reject wrong value types in ifNot', () => { + type FlowInput = { userId: string; role: string }; + + // @ts-expect-error - role should be string, not number + new Flow({ slug: 'test_flow' }).step( + { slug: 'check', ifNot: { role: 123 } }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (input: any) => input.userId + ); + }); + + it('should allow combined if and ifNot', () => { + type FlowInput = { role: string; active: boolean; suspended?: boolean }; + + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'admin_action', + if: { role: 'admin', active: true }, + ifNot: { suspended: true }, + }, + (input) => input.role + ); + + expectTypeOf(flow).toBeObject(); + }); + }); + + describe('dependent step ifNot', () => { + it('should type ifNot as ContainmentPattern', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }) + .step({ slug: 'fetch' }, () => ({ hasError: true, data: 'result' })) + .step( + { + slug: 'process', + dependsOn: ['fetch'], + ifNot: { fetch: { hasError: true } }, + }, + (deps) => deps.fetch.data + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should reject invalid dep slug in ifNot', () => { + new Flow<{ initial: string }>({ slug: 'test_flow' }) + .step({ slug: 'fetch' }, () => ({ status: 'ok' })) + .step( + { + slug: 'process', + dependsOn: ['fetch'], + // @ts-expect-error - 'nonexistent' is not a dependency + ifNot: { nonexistent: { status: 'error' } }, + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (deps: any) => deps.fetch.status + ); + }); + + it('should reject invalid keys within dep output for ifNot', () => { + new Flow<{ initial: string }>({ slug: 'test_flow' }) + .step({ slug: 'fetch' }, () => ({ status: 'ok' })) + .step( + { + slug: 'process', + dependsOn: ['fetch'], + // @ts-expect-error - 'invalidField' does not exist on fetch output + ifNot: { fetch: { invalidField: 'value' } }, + }, + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + (deps: any) => deps.fetch.status + ); + }); + }); + + describe('array step ifNot', () => { + it('should type ifNot for root array step', () => { + type FlowInput = { items: string[]; disabled: boolean }; + + const flow = new Flow({ slug: 'test_flow' }).array( + { slug: 'getItems', ifNot: { disabled: true } }, + (input) => input.items + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should type ifNot for dependent array step', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }) + .step({ slug: 'fetch' }, () => ({ error: false, items: ['a', 'b'] })) + .array( + { + slug: 'process', + dependsOn: ['fetch'], + ifNot: { fetch: { error: true } }, + }, + (deps) => deps.fetch.items + ); + + expectTypeOf(flow).toBeObject(); + }); + }); + + describe('map step ifNot', () => { + it('should type ifNot for root map step', () => { + type FlowInput = { type: string; value: number }[]; + + const flow = new Flow({ slug: 'test_flow' }).map( + // Root map ifNot checks the array itself + { slug: 'process', ifNot: [{ type: 'disabled' }] }, + (item) => item.value * 2 + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should type ifNot for dependent map step', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }) + .step({ slug: 'fetch' }, () => [ + { id: 1, deleted: false }, + { id: 2, deleted: true }, + ]) + .map( + { + slug: 'process', + array: 'fetch', + // Condition checks the array dep + ifNot: { fetch: [{ deleted: true }] }, + }, + (item) => item.id + ); + + expectTypeOf(flow).toBeObject(); + }); + }); +}); + +describe('whenUnmet requires if or ifNot', () => { + describe('step method', () => { + it('should allow whenUnmet with if', () => { + type FlowInput = { role: string }; + + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'admin', if: { role: 'admin' }, whenUnmet: 'skip' }, + (input) => input.role + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should allow whenUnmet with ifNot', () => { + type FlowInput = { role: string }; + + const flow = new Flow({ slug: 'test_flow' }).step( + { slug: 'non_admin', ifNot: { role: 'admin' }, whenUnmet: 'skip' }, + (input) => input.role + ); + + expectTypeOf(flow).toBeObject(); + }); + + it('should allow whenUnmet with both if and ifNot', () => { + type FlowInput = { role: string; suspended: boolean }; + + const flow = new Flow({ slug: 'test_flow' }).step( + { + slug: 'active_admin', + if: { role: 'admin' }, + ifNot: { suspended: true }, + whenUnmet: 'skip-cascade', + }, + (input) => input.role + ); + + expectTypeOf(flow).toBeObject(); + }); + }); + + describe('array method', () => { + it('should allow whenUnmet with if on array step', () => { + type FlowInput = { items: string[]; enabled: boolean }; + + const flow = new Flow({ slug: 'test_flow' }).array( + { slug: 'getItems', if: { enabled: true }, whenUnmet: 'skip' }, + (input) => input.items + ); + + expectTypeOf(flow).toBeObject(); + }); + }); + + describe('map method', () => { + it('should allow whenUnmet with ifNot on map step', () => { + type FlowInput = { type: string; value: number }[]; + + const flow = new Flow({ slug: 'test_flow' }).map( + { slug: 'process', ifNot: [{ type: 'disabled' }], whenUnmet: 'skip' }, + (item) => item.value + ); + + expectTypeOf(flow).toBeObject(); + }); + }); + + describe('whenUnmet rejection tests', () => { + it('should reject whenUnmet without if or ifNot on root step', () => { + type FlowInput = { role: string }; + + new Flow({ slug: 'test_flow' }) + // 
@ts-expect-error - whenUnmet requires if or ifNot + .step({ slug: 'step', whenUnmet: 'skip' }, (input) => input.role); + }); + + it('should reject whenUnmet without if or ifNot on dependent step', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }).step( + { slug: 'first' }, + () => ({ done: true }) + ); + + // @ts-expect-error - whenUnmet requires if or ifNot + flow.step( + { slug: 'second', dependsOn: ['first'], whenUnmet: 'skip' }, + // Handler typed as any to suppress cascading error from failed overload + (deps: any) => deps.first.done + ); + }); + + it('should reject whenUnmet without if or ifNot on root array step', () => { + type FlowInput = { items: string[] }; + + new Flow({ slug: 'test_flow' }) + // @ts-expect-error - whenUnmet requires if or ifNot + .array({ slug: 'getItems', whenUnmet: 'skip' }, (input) => input.items); + }); + + it('should reject whenUnmet without if or ifNot on dependent array step', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }).step( + { slug: 'fetch' }, + () => ({ items: ['a', 'b'] }) + ); + + // @ts-expect-error - whenUnmet requires if or ifNot + flow.array( + { slug: 'process', dependsOn: ['fetch'], whenUnmet: 'skip' }, + // Handler typed as any to suppress cascading error from failed overload + (deps: any) => deps.fetch.items + ); + }); + + it('should reject whenUnmet without if or ifNot on root map step', () => { + type FlowInput = { value: number }[]; + + new Flow({ slug: 'test_flow' }) + // @ts-expect-error - whenUnmet requires if or ifNot + .map({ slug: 'process', whenUnmet: 'skip' }, (item) => item.value); + }); + + it('should reject whenUnmet without if or ifNot on dependent map step', () => { + const flow = new Flow<{ initial: string }>({ slug: 'test_flow' }).step( + { slug: 'fetch' }, + () => [{ id: 1 }, { id: 2 }] + ); + + // @ts-expect-error - whenUnmet requires if or ifNot + flow.map( + { slug: 'process', array: 'fetch', whenUnmet: 'skip' }, + // Handler typed as any to suppress cascading error from failed overload + (item: any) => item.id + ); + }); + }); +}); diff --git a/pkgs/dsl/__tests__/types/skippable-deps.test-d.ts b/pkgs/dsl/__tests__/types/skippable-deps.test-d.ts index 54c2a8d3e..703bcfdb3 100644 --- a/pkgs/dsl/__tests__/types/skippable-deps.test-d.ts +++ b/pkgs/dsl/__tests__/types/skippable-deps.test-d.ts @@ -4,16 +4,16 @@ import { describe, it, expectTypeOf } from 'vitest'; /** * Type tests for skippable step dependencies * - * When a step has `else: 'skip' | 'skip-cascade'` or `retriesExhausted: 'skip' | 'skip-cascade'`, + * When a step has `whenUnmet: 'skip' | 'skip-cascade'` or `retriesExhausted: 'skip' | 'skip-cascade'`, * it may not execute. Dependent steps should receive that step's output as an optional key. 
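As a concrete illustration of that contract (a sketch, not part of the test file; slugs are illustrative): a dependent handler must treat a skippable dependency's key as possibly absent, typically via optional chaining.

```ts
// Sketch: reading a skippable dependency defensively.
import { Flow } from '../../src/dsl.js';

new Flow<{ value: number }>({ slug: 'sketch' })
  .step(
    { slug: 'maybe', if: { value: 42 }, whenUnmet: 'skip' },
    (input) => ({ doubled: input.value * 2 })
  )
  .step({ slug: 'use', dependsOn: ['maybe'] }, (deps) => {
    // deps.maybe is optional because 'maybe' may be skipped,
    // so direct property access does not compile; fall back explicitly.
    const doubled = deps.maybe?.doubled ?? 0;
    return { doubled };
  });
```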
*/ describe('skippable deps type safety', () => { - describe('core skippability - else', () => { - it('step with else: skip makes output optional for dependents', () => { + describe('core skippability - whenUnmet', () => { + it('step with whenUnmet: skip makes output optional for dependents', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) .step( - { slug: 'conditional', if: { value: 42 }, else: 'skip' }, + { slug: 'conditional', if: { value: 42 }, whenUnmet: 'skip' }, (input) => ({ result: input.value * 2 }) ) .step({ slug: 'dependent', dependsOn: ['conditional'] }, (deps) => { @@ -30,10 +30,10 @@ describe('skippable deps type safety', () => { }>(); }); - it('step with else: skip-cascade makes output optional for dependents', () => { + it('step with whenUnmet: skip-cascade makes output optional for dependents', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) .step( - { slug: 'conditional', if: { value: 42 }, else: 'skip-cascade' }, + { slug: 'conditional', if: { value: 42 }, whenUnmet: 'skip-cascade' }, (input) => ({ result: input.value * 2 }) ) .step({ slug: 'dependent', dependsOn: ['conditional'] }, (deps) => { @@ -49,14 +49,14 @@ describe('skippable deps type safety', () => { }>(); }); - it('step with else: fail keeps output required (default behavior)', () => { + it('step with whenUnmet: fail keeps output required (default behavior)', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) .step( - { slug: 'conditional', if: { value: 42 }, else: 'fail' }, + { slug: 'conditional', if: { value: 42 }, whenUnmet: 'fail' }, (input) => ({ result: input.value * 2 }) ) .step({ slug: 'dependent', dependsOn: ['conditional'] }, (deps) => { - // else: 'fail' means step either runs or flow fails - output is guaranteed + // whenUnmet: 'fail' means step either runs or flow fails - output is guaranteed expectTypeOf(deps.conditional).toEqualTypeOf<{ result: number }>(); return { done: true }; }); @@ -67,7 +67,7 @@ describe('skippable deps type safety', () => { }>(); }); - it('step without else keeps output required', () => { + it('step without whenUnmet keeps output required', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) .step({ slug: 'normal' }, (input) => ({ result: input.value * 2 })) .step({ slug: 'dependent', dependsOn: ['normal'] }, (deps) => { @@ -139,9 +139,12 @@ describe('skippable deps type safety', () => { describe('multiple dependencies - mixed skippability', () => { it('mixed deps: some optional, some required', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) - .step({ slug: 'skippable', if: { value: 42 }, else: 'skip' }, () => ({ - a: 1, - })) + .step( + { slug: 'skippable', if: { value: 42 }, whenUnmet: 'skip' }, + () => ({ + a: 1, + }) + ) .step({ slug: 'required' }, () => ({ b: 2 })) .step( { slug: 'dependent', dependsOn: ['skippable', 'required'] }, @@ -317,13 +320,13 @@ describe('skippable deps type safety', () => { }>(); }); - it('both else and retriesExhausted set: still skippable', () => { + it('both whenUnmet and retriesExhausted set: still skippable', () => { const flow = new Flow<{ value: number }>({ slug: 'test' }) .step( { slug: 'both', if: { value: 42 }, - else: 'skip', + whenUnmet: 'skip', retriesExhausted: 'skip', }, () => ({ result: 1 }) @@ -459,26 +462,27 @@ describe('skippable deps compile-time errors', () => { }); }); - it('should reject direct property access with else: skip', () => { + it('should reject direct property access with whenUnmet: skip', () => { new Flow<{ value: 
number }>({ slug: 'test' }) - .step({ slug: 'conditional', if: { value: 42 }, else: 'skip' }, () => ({ - processed: true, - })) + .step( + { slug: 'conditional', if: { value: 42 }, whenUnmet: 'skip' }, + () => ({ processed: true }) + ) .step({ slug: 'next', dependsOn: ['conditional'] }, (deps) => { - // @ts-expect-error - deps.conditional is optional due to else: 'skip' + // @ts-expect-error - deps.conditional is optional due to whenUnmet: skip const flag: boolean = deps.conditional.processed; return { flag }; }); }); - it('should reject direct property access with else: skip-cascade', () => { + it('should reject direct property access with whenUnmet: skip-cascade', () => { new Flow<{ value: number }>({ slug: 'test' }) .step( - { slug: 'cascading', if: { value: 42 }, else: 'skip-cascade' }, + { slug: 'cascading', if: { value: 42 }, whenUnmet: 'skip-cascade' }, () => ({ count: 10 }) ) .step({ slug: 'next', dependsOn: ['cascading'] }, (deps) => { - // @ts-expect-error - deps.cascading is optional due to else: 'skip-cascade' + // @ts-expect-error - deps.cascading is optional due to whenUnmet: skip-cascade const num: number = deps.cascading.count; return { num }; }); diff --git a/pkgs/dsl/src/compile-flow.ts b/pkgs/dsl/src/compile-flow.ts index d321d6dbf..9f197b186 100644 --- a/pkgs/dsl/src/compile-flow.ts +++ b/pkgs/dsl/src/compile-flow.ts @@ -43,7 +43,9 @@ export function compileFlow(flow: AnyFlow): string[] { /** * Formats runtime options into SQL parameter string */ -function formatRuntimeOptions(options: RuntimeOptions | StepRuntimeOptions): string { +function formatRuntimeOptions( + options: RuntimeOptions | StepRuntimeOptions +): string { const parts: string[] = []; if (options.maxAttempts !== undefined) { @@ -68,8 +70,14 @@ function formatRuntimeOptions(options: RuntimeOptions | StepRuntimeOptions): str parts.push(`condition_pattern => '${jsonStr}'`); } - if ('else' in options && options.else !== undefined) { - parts.push(`when_unmet => '${options.else}'`); + if ('ifNot' in options && options.ifNot !== undefined) { + // Serialize JSON pattern and escape for SQL + const jsonStr = JSON.stringify(options.ifNot); + parts.push(`condition_not_pattern => '${jsonStr}'`); + } + + if ('whenUnmet' in options && options.whenUnmet !== undefined) { + parts.push(`when_unmet => '${options.whenUnmet}'`); } if ('retriesExhausted' in options && options.retriesExhausted !== undefined) { diff --git a/pkgs/dsl/src/dsl.ts b/pkgs/dsl/src/dsl.ts index 9805f03b5..4e6abf0ee 100644 --- a/pkgs/dsl/src/dsl.ts +++ b/pkgs/dsl/src/dsl.ts @@ -285,7 +285,7 @@ export type StepOutput< : never; /** - * Checks if a step is skippable (has else: 'skip' | 'skip-cascade' or retriesExhausted: 'skip' | 'skip-cascade') + * Checks if a step is skippable (has whenUnmet: 'skip' | 'skip-cascade' or retriesExhausted: 'skip' | 'skip-cascade') * @template TFlow - The Flow type * @template TStepSlug - The step slug to check */ @@ -319,7 +319,7 @@ type OptionalDeps = { * Asymmetric step input type: * - Root steps (no dependencies): receive flow input directly * - Dependent steps: receive only their dependencies (flow input available via context) - * - Skippable deps (else/retriesExhausted: 'skip' | 'skip-cascade') are optional + * - Skippable deps (whenUnmet/retriesExhausted: 'skip' | 'skip-cascade') are optional * - Required deps are required * * This enables functional composition where subflows can receive typed inputs @@ -400,22 +400,22 @@ export type Context< * * @example * // Fail the step (and run) when pattern doesn't match - * { 
if: { enabled: true }, else: 'fail' } + * { if: { enabled: true }, whenUnmet: 'fail' } * * @example * // Skip this step only when pattern doesn't match - * { if: { enabled: true }, else: 'skip' } + * { if: { enabled: true }, whenUnmet: 'skip' } * * @example * // Skip this step and all dependents when pattern doesn't match - * { if: { enabled: true }, else: 'skip-cascade' } + * { if: { enabled: true }, whenUnmet: 'skip-cascade' } * * @remarks * - `'fail'`: When pattern doesn't match, step fails -> run fails (default) * - `'skip'`: When pattern doesn't match, skip step and continue (step key omitted from dependent inputs) * - `'skip-cascade'`: When pattern doesn't match, skip step + mark all dependents as skipped */ -export type ElseMode = 'fail' | 'skip' | 'skip-cascade'; +export type WhenUnmetMode = 'fail' | 'skip' | 'skip-cascade'; /** * Options for handling errors after all retries are exhausted @@ -446,7 +446,7 @@ export type RetriesExhaustedMode = 'fail' | 'skip' | 'skip-cascade'; /** * Helper type for dependent step handlers - creates deps object with correct optionality. - * Skippable deps (steps with else/retriesExhausted: 'skip' | 'skip-cascade') are optional. + * Skippable deps (steps with whenUnmet/retriesExhausted: 'skip' | 'skip-cascade') are optional. * Required deps are required. */ type DepsWithOptionalSkippable< @@ -487,23 +487,43 @@ export interface StepRuntimeOptions extends RuntimeOptions { * - Objects: all keys optional, recursively applied * - Arrays: elements expected to be present in target array * - * @see ElseMode for controlling what happens when pattern doesn't match + * @see WhenUnmetMode for controlling what happens when pattern doesn't match */ if?: Json; + /** + * Negative pattern - step executes when input does NOT match this pattern + * + * @example + * // Root step: execute when NOT an admin + * { ifNot: { role: 'admin' } } + * + * @example + * // Combined with 'if' for AND semantics: "active admin who is NOT suspended" + * { if: { role: 'admin', active: true }, ifNot: { suspended: true } } + * + * @remarks + * - Uses PostgreSQL's @> containment check, negated + * - When combined with 'if', BOTH must pass (AND semantics) + * - For mutual exclusion: use same pattern with if on one step, ifNot on another + * + * @see WhenUnmetMode for controlling what happens when condition not met + */ + ifNot?: Json; + /** * What to do when the 'if' pattern doesn't match the input * * @default 'skip' * * @example - * { else: 'fail' } // Pattern doesn't match -> step fails -> run fails - * { else: 'skip' } // Pattern doesn't match -> skip step, continue run - * { else: 'skip-cascade' } // Pattern doesn't match -> skip step + all dependents + * { whenUnmet: 'fail' } // Pattern doesn't match -> step fails -> run fails + * { whenUnmet: 'skip' } // Pattern doesn't match -> skip step, continue run + * { whenUnmet: 'skip-cascade' } // Pattern doesn't match -> skip step + all dependents * - * @see ElseMode for detailed documentation of each mode + * @see WhenUnmetMode for detailed documentation of each mode */ - else?: ElseMode; + whenUnmet?: WhenUnmetMode; /** * What to do when handler throws an error after all retries are exhausted @@ -524,23 +544,58 @@ export interface StepRuntimeOptions extends RuntimeOptions { retriesExhausted?: RetriesExhaustedMode; } -// Base runtime options without 'if' (for typed overloads) +// Base runtime options without condition-related fields interface BaseStepRuntimeOptions extends RuntimeOptions { startDelay?: number; - else?: ElseMode; 
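The remarks above mention a mutual-exclusion pattern: the same containment pattern used as `if` on one step and as `ifNot` on another, so exactly one branch runs for a given input. A hedged sketch (slugs and handlers are illustrative):

```ts
// Sketch of the mutual-exclusion pattern from the ifNot remarks:
// for any given input, exactly one of the two branches executes.
import { Flow } from '../../src/dsl.js';

new Flow<{ role: string }>({ slug: 'branching' })
  .step(
    { slug: 'admin_path', if: { role: 'admin' }, whenUnmet: 'skip' },
    (input) => `admin flow for ${input.role}`
  )
  .step(
    { slug: 'default_path', ifNot: { role: 'admin' }, whenUnmet: 'skip' },
    (input) => `default flow for ${input.role}`
  );
```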
retriesExhausted?: RetriesExhaustedMode; } -// Typed step options for root steps (if matches FlowInput pattern) -type RootStepOptions = BaseStepRuntimeOptions & { - if?: ContainmentPattern; +/** + * Condition with 'if' required (ifNot optional) - allows whenUnmet. + * whenUnmet only makes sense when there's a condition to be "unmet". + */ +type WithIfCondition = { + if: ContainmentPattern; + ifNot?: ContainmentPattern; + whenUnmet?: WhenUnmetMode; }; -// Typed step options for dependent steps (if matches deps object pattern) -type DependentStepOptions = BaseStepRuntimeOptions & { - if?: ContainmentPattern; +/** + * Condition with 'ifNot' required (if optional) - allows whenUnmet. + */ +type WithIfNotCondition = { + if?: ContainmentPattern; + ifNot: ContainmentPattern; + whenUnmet?: WhenUnmetMode; +}; + +/** + * No condition - if, ifNot, and whenUnmet are all forbidden. + * This ensures whenUnmet can only be used with a condition. + */ +type WithoutCondition = { + if?: never; + ifNot?: never; + whenUnmet?: never; }; +/** + * Discriminated union for condition options. + * whenUnmet is only allowed when if or ifNot is provided. + */ +type ConditionOpts = + | WithIfCondition + | WithIfNotCondition + | WithoutCondition; + +// Typed step options for root steps (if/ifNot match FlowInput pattern) +export type RootStepOptions = BaseStepRuntimeOptions & + ConditionOpts; + +// Typed step options for dependent steps (if/ifNot match deps object pattern) +export type DependentStepOptions = BaseStepRuntimeOptions & + ConditionOpts; + // Define the StepDefinition interface with integrated options export interface StepDefinition< TInput extends AnyInput, @@ -641,19 +696,24 @@ export class Flow< // Overload 1: Root step (no dependsOn) - receives flowInput directly // if is typed as ContainmentPattern + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) step< Slug extends string, TOutput, - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? never : Slug; dependsOn?: never; - else?: TElse; retriesExhausted?: TRetries; - } & Omit, 'else' | 'retriesExhausted'> + } & ( + | (WithIfCondition & { whenUnmet?: TWhenUnmet }) + | (WithIfNotCondition & { whenUnmet?: TWhenUnmet }) + | WithoutCondition + ) & + Omit >, handler: ( flowInput: TFlowInput, @@ -665,7 +725,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< Awaited, - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? true @@ -680,23 +740,29 @@ export class Flow< // if is typed as ContainmentPattern // Note: [Deps, ...Deps[]] requires at least one dependency - empty arrays are rejected at compile time // Handler receives deps with correct optionality based on upstream steps' skippability + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) step< Slug extends string, Deps extends Extract, TOutput, - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? 
never : Slug; dependsOn: [Deps, ...Deps[]]; - else?: TElse; retriesExhausted?: TRetries; - } & Omit< - DependentStepOptions>>, - 'else' | 'retriesExhausted' - > + } & ( + | (WithIfCondition>> & { + whenUnmet?: TWhenUnmet; + }) + | (WithIfNotCondition< + Simplify> + > & { whenUnmet?: TWhenUnmet }) + | WithoutCondition + ) & + Omit >, handler: ( deps: Simplify>, @@ -708,7 +774,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< Awaited, - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? true @@ -747,7 +813,8 @@ export class Flow< if (opts.timeout !== undefined) options.timeout = opts.timeout; if (opts.startDelay !== undefined) options.startDelay = opts.startDelay; if (opts.if !== undefined) options.if = opts.if; - if (opts.else !== undefined) options.else = opts.else; + if (opts.ifNot !== undefined) options.ifNot = opts.ifNot; + if (opts.whenUnmet !== undefined) options.whenUnmet = opts.whenUnmet; if (opts.retriesExhausted !== undefined) options.retriesExhausted = opts.retriesExhausted; @@ -794,19 +861,24 @@ export class Flow< */ // Overload 1: Root array (no dependsOn) - receives flowInput directly // if is typed as ContainmentPattern + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) array< Slug extends string, TOutput extends readonly any[], - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? never : Slug; dependsOn?: never; - else?: TElse; retriesExhausted?: TRetries; - } & Omit, 'else' | 'retriesExhausted'> + } & ( + | (WithIfCondition & { whenUnmet?: TWhenUnmet }) + | (WithIfNotCondition & { whenUnmet?: TWhenUnmet }) + | WithoutCondition + ) & + Omit >, handler: ( flowInput: TFlowInput, @@ -818,7 +890,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< Awaited, - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? true @@ -832,23 +904,29 @@ export class Flow< // Overload 2: Dependent array (with dependsOn) - receives deps, flowInput via context // if is typed as ContainmentPattern // Note: [Deps, ...Deps[]] requires at least one dependency - empty arrays are rejected at compile time + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) array< Slug extends string, Deps extends Extract, TOutput extends readonly any[], - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? never : Slug; dependsOn: [Deps, ...Deps[]]; - else?: TElse; retriesExhausted?: TRetries; - } & Omit< - DependentStepOptions>>, - 'else' | 'retriesExhausted' - > + } & ( + | (WithIfCondition>> & { + whenUnmet?: TWhenUnmet; + }) + | (WithIfNotCondition< + Simplify> + > & { whenUnmet?: TWhenUnmet }) + | WithoutCondition + ) & + Omit >, handler: ( deps: Simplify>, @@ -860,7 +938,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< Awaited, - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? 
true @@ -890,6 +968,7 @@ export class Flow< */ // Overload for root map - handler receives item, context includes flowInput // if is typed as ContainmentPattern (checks the array itself) + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) map< Slug extends string, THandler extends TFlowInput extends readonly (infer Item)[] @@ -898,15 +977,19 @@ export class Flow< context: FlowContext & TContext ) => Json | Promise : never, - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? never : Slug; - else?: TElse; retriesExhausted?: TRetries; - } & Omit, 'else' | 'retriesExhausted'> + } & ( + | (WithIfCondition & { whenUnmet?: TWhenUnmet }) + | (WithIfNotCondition & { whenUnmet?: TWhenUnmet }) + | WithoutCondition + ) & + Omit >, handler: THandler ): Flow< @@ -915,7 +998,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< AwaitedReturn[], - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? true @@ -928,6 +1011,7 @@ export class Flow< // Overload for dependent map - handler receives item, context includes flowInput // if is typed as ContainmentPattern<{ arrayDep: ArrayOutput }> (checks the dep object) + // whenUnmet is only allowed when if or ifNot is provided (enforced by ConditionOpts union) map< Slug extends string, TArrayDep extends Extract, @@ -937,19 +1021,24 @@ export class Flow< context: FlowContext & TContext ) => Json | Promise : never, - TElse extends ElseMode | undefined = undefined, + TWhenUnmet extends WhenUnmetMode | undefined = undefined, TRetries extends RetriesExhaustedMode | undefined = undefined >( opts: Simplify< { slug: Slug extends keyof Steps ? never : Slug; array: TArrayDep; - else?: TElse; retriesExhausted?: TRetries; - } & Omit< - DependentStepOptions<{ [K in TArrayDep]: Steps[K]['output'] }>, - 'else' | 'retriesExhausted' - > + } & ( + | (WithIfCondition<{ [K in TArrayDep]: Steps[K]['output'] }> & { + whenUnmet?: TWhenUnmet; + }) + | (WithIfNotCondition<{ [K in TArrayDep]: Steps[K]['output'] }> & { + whenUnmet?: TWhenUnmet; + }) + | WithoutCondition + ) & + Omit >, handler: THandler ): Flow< @@ -958,7 +1047,7 @@ export class Flow< Steps & { [K in Slug]: StepMeta< AwaitedReturn[], - TElse extends 'skip' | 'skip-cascade' + TWhenUnmet extends 'skip' | 'skip-cascade' ? true : TRetries extends 'skip' | 'skip-cascade' ? true @@ -1003,7 +1092,8 @@ export class Flow< if (opts.timeout !== undefined) options.timeout = opts.timeout; if (opts.startDelay !== undefined) options.startDelay = opts.startDelay; if (opts.if !== undefined) options.if = opts.if; - if (opts.else !== undefined) options.else = opts.else; + if (opts.ifNot !== undefined) options.ifNot = opts.ifNot; + if (opts.whenUnmet !== undefined) options.whenUnmet = opts.whenUnmet; if (opts.retriesExhausted !== undefined) options.retriesExhausted = opts.retriesExhausted;
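To close the loop, a minimal sketch (slugs illustrative) of what the `ConditionOpts` union means at the call site: `whenUnmet` is only accepted alongside `if` or `ifNot`, and the condition options are copied onto the step definition as shown above.

```ts
// Sketch: whenUnmet requires a condition (if or ifNot), and the chosen
// options surface on the step definition returned by getStepDefinition.
import { Flow } from '../../src/dsl.js';

const flow = new Flow<{ role: string; suspended: boolean }>({ slug: 'sketch' }).step(
  {
    slug: 'guarded',
    if: { role: 'admin' },
    ifNot: { suspended: true },
    whenUnmet: 'skip',
  },
  (input) => input.role
);

console.log(flow.getStepDefinition('guarded').options);
// -> includes { if: { role: 'admin' }, ifNot: { suspended: true }, whenUnmet: 'skip' }

// Rejected at compile time by the ConditionOpts union: nothing to be "unmet".
// new Flow<{ role: string }>({ slug: 'bad' })
//   .step({ slug: 'oops', whenUnmet: 'skip' }, (input) => input.role);
```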