From 6eb8f61c5b9777cb21ac68eff13aa4f0722f691a Mon Sep 17 00:00:00 2001 From: Hilary James Oliver Date: Mon, 17 Jun 2024 13:53:10 +0000 Subject: [PATCH] Fix workflow-state command and xtrigger. (#5809) undefined --- changes.d/5809.feat.d | 1 + changes.d/5809.fix.d | 2 + cylc/flow/cfgspec/workflow.py | 88 ++- cylc/flow/command_polling.py | 73 +-- cylc/flow/config.py | 81 ++- cylc/flow/cycling/util.py | 7 +- cylc/flow/data_store_mgr.py | 12 +- cylc/flow/dbstatecheck.py | 346 ++++++++--- .../examples/event-driven-cycling/.validate | 7 +- .../inter-workflow-triggers/.validate | 7 +- .../downstream/flow.cylc | 2 +- cylc/flow/exceptions.py | 9 +- cylc/flow/graph_parser.py | 9 +- cylc/flow/option_parsers.py | 13 +- cylc/flow/parsec/upgrade.py | 22 +- cylc/flow/parsec/validate.py | 5 +- cylc/flow/rundb.py | 24 +- cylc/flow/scheduler.py | 1 - cylc/flow/scripts/function_run.py | 9 +- cylc/flow/scripts/validate_install_play.py | 5 +- cylc/flow/scripts/validate_reinstall.py | 4 + cylc/flow/scripts/workflow_state.py | 570 ++++++++++++------ cylc/flow/subprocctx.py | 8 +- cylc/flow/subprocpool.py | 4 +- cylc/flow/task_events_mgr.py | 10 +- cylc/flow/task_job_mgr.py | 6 +- cylc/flow/task_outputs.py | 57 +- cylc/flow/task_pool.py | 85 ++- cylc/flow/util.py | 21 +- cylc/flow/workflow_db_mgr.py | 34 +- cylc/flow/xtrigger_mgr.py | 458 ++++++++------ cylc/flow/xtriggers/suite_state.py | 12 +- cylc/flow/xtriggers/workflow_state.py | 212 +++++-- .../cylc-poll/16-execution-time-limit.t | 2 +- tests/flakyfunctional/events/44-timeout.t | 2 +- .../xtriggers/01-workflow_state.t | 27 +- .../xtriggers/01-workflow_state/flow.cylc | 9 +- .../01-workflow_state/upstream/flow.cylc | 4 +- tests/functional/cylc-cat-log/04-local-tail.t | 2 +- tests/functional/cylc-config/00-simple.t | 5 +- .../cylc-config/00-simple/section2.stdout | 52 +- .../cylc-play/07-timezones-compat.t | 5 +- .../cylc-set/00-set-succeeded/flow.cylc | 8 +- tests/functional/cylc-set/05-expire.t | 2 +- 
.../data-store/00-prune-optional-break.t | 4 +- .../01-cylc8-basic/validation.stderr | 7 +- .../functional/flow-triggers/11-wait-merge.t | 16 +- tests/functional/job-submission/16-timeout.t | 2 +- tests/functional/logging/04-dev_mode.t | 4 +- .../optional-outputs/08-finish-fail-c7-c8.t | 2 +- tests/functional/queues/02-queueorder.t | 2 +- tests/functional/queues/qsize/flow.cylc | 8 +- tests/functional/reload/03-queues/flow.cylc | 6 +- .../reload/22-remove-task-cycling.t | 2 +- tests/functional/restart/30-outputs.t | 2 +- tests/functional/restart/30-outputs/flow.cylc | 4 +- .../restart/34-auto-restart-basic.t | 14 +- .../restart/38-auto-restart-stopping.t | 3 +- .../restart/41-auto-restart-local-jobs.t | 5 +- tests/functional/workflow-state/00-polling.t | 19 +- tests/functional/workflow-state/01-polling.t | 8 +- tests/functional/workflow-state/05-message.t | 34 -- tests/functional/workflow-state/05-output.t | 32 + tests/functional/workflow-state/06-format.t | 24 +- .../functional/workflow-state/06a-noformat.t | 29 +- tests/functional/workflow-state/07-message2.t | 11 +- tests/functional/workflow-state/08-integer.t | 82 +++ tests/functional/workflow-state/09-datetime.t | 121 ++++ .../functional/workflow-state/10-backcompat.t | 54 ++ tests/functional/workflow-state/11-multi.t | 130 ++++ .../functional/workflow-state/11-multi/c7.sql | 39 ++ .../workflow-state/11-multi/c8a.sql | 48 ++ .../workflow-state/11-multi/c8b.sql | 48 ++ .../workflow-state/11-multi/flow.cylc | 69 +++ .../workflow-state/11-multi/reference.log | 17 + .../workflow-state/11-multi/upstream/suite.rc | 17 + .../workflow-state/backcompat/schema-1.sql | 49 ++ .../workflow-state/backcompat/schema-2.sql | 49 ++ .../workflow-state/backcompat/suite.rc | 16 + .../workflow-state/datetime/flow.cylc | 21 + .../workflow-state/integer/flow.cylc | 14 + .../workflow-state/options/flow.cylc | 6 +- .../{message => output}/flow.cylc | 0 .../{message => output}/reference.log | 0 .../workflow-state/polling/flow.cylc | 5 
+- .../workflow-state/template_ref/flow.cylc | 13 - .../workflow-state/template_ref/reference.log | 4 - tests/functional/xtriggers/03-sequence.t | 1 - tests/functional/xtriggers/04-sequential.t | 17 +- tests/integration/conftest.py | 124 ++-- .../scripts/test_validate_integration.py | 4 +- tests/integration/test_config.py | 8 +- tests/integration/test_dbstatecheck.py | 139 +++++ .../integration/test_sequential_xtriggers.py | 2 +- tests/integration/test_xtrigger_mgr.py | 43 ++ tests/unit/cycling/test_util.py | 8 +- tests/unit/test_config.py | 16 +- tests/unit/test_db_compat.py | 4 +- tests/unit/test_graph_parser.py | 3 +- tests/unit/test_util.py | 6 +- tests/unit/test_xtrigger_mgr.py | 118 ++-- tests/unit/xtriggers/test_workflow_state.py | 244 ++++++-- tox.ini | 4 +- 103 files changed, 2940 insertions(+), 1163 deletions(-) create mode 100644 changes.d/5809.feat.d create mode 100644 changes.d/5809.fix.d delete mode 100755 tests/functional/workflow-state/05-message.t create mode 100755 tests/functional/workflow-state/05-output.t create mode 100755 tests/functional/workflow-state/08-integer.t create mode 100755 tests/functional/workflow-state/09-datetime.t create mode 100755 tests/functional/workflow-state/10-backcompat.t create mode 100644 tests/functional/workflow-state/11-multi.t create mode 100644 tests/functional/workflow-state/11-multi/c7.sql create mode 100644 tests/functional/workflow-state/11-multi/c8a.sql create mode 100644 tests/functional/workflow-state/11-multi/c8b.sql create mode 100644 tests/functional/workflow-state/11-multi/flow.cylc create mode 100644 tests/functional/workflow-state/11-multi/reference.log create mode 100644 tests/functional/workflow-state/11-multi/upstream/suite.rc create mode 100644 tests/functional/workflow-state/backcompat/schema-1.sql create mode 100644 tests/functional/workflow-state/backcompat/schema-2.sql create mode 100644 tests/functional/workflow-state/backcompat/suite.rc create mode 100644 
tests/functional/workflow-state/datetime/flow.cylc create mode 100644 tests/functional/workflow-state/integer/flow.cylc rename tests/functional/workflow-state/{message => output}/flow.cylc (100%) rename tests/functional/workflow-state/{message => output}/reference.log (100%) delete mode 100644 tests/functional/workflow-state/template_ref/flow.cylc delete mode 100644 tests/functional/workflow-state/template_ref/reference.log create mode 100644 tests/integration/test_dbstatecheck.py diff --git a/changes.d/5809.feat.d b/changes.d/5809.feat.d new file mode 100644 index 00000000000..c5e7d0afe5e --- /dev/null +++ b/changes.d/5809.feat.d @@ -0,0 +1 @@ +The workflow-state command and xtrigger are now flow-aware and take universal IDs instead of separate arguments for cycle point, task name, etc. (which are still supported, but deprecated). diff --git a/changes.d/5809.fix.d b/changes.d/5809.fix.d new file mode 100644 index 00000000000..36fc5fbf481 --- /dev/null +++ b/changes.d/5809.fix.d @@ -0,0 +1,2 @@ +Fix bug where the "cylc workflow-state" command only polled for +task-specific status queries and custom outputs. diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py index 1e1fb73f712..934897bdbb4 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -1020,8 +1020,9 @@ def get_script_common_text(this: str, example: Optional[str] = None): task has generated the outputs it was expected to. If the task fails this check its outputs are considered - :term:`incomplete` and a warning will be raised alerting you - that something has gone wrong which requires investigation. + :term:`incomplete ` and a warning will be + raised alerting you that something has gone wrong which + requires investigation. .. 
note:: @@ -1731,57 +1732,34 @@ def get_script_common_text(this: str, example: Optional[str] = None): ''') with Conf('workflow state polling', desc=f''' - Configure automatic workflow polling tasks as described in - :ref:`WorkflowStatePolling`. - - The items in this section reflect - options and defaults of the ``cylc workflow-state`` command, - except that the target workflow ID and the - ``--task``, ``--cycle``, and ``--status`` options are - taken from the graph notation. + Deprecated support for automatic workflow state polling tasks + as described in :ref:`WorkflowStatePolling`. Note the Cylc 7 + "user" and "host" config items are not supported. .. versionchanged:: 8.0.0 {REPLACES}``[runtime][]suite state polling``. - '''): - Conf('user', VDR.V_STRING, desc=''' - Username of your account on the workflow host. - The polling - ``cylc workflow-state`` command will be - run on the remote account. - ''') - Conf('host', VDR.V_STRING, desc=''' - The hostname of the target workflow. + .. deprecated:: 8.3.0 - The polling - ``cylc workflow-state`` command will be run there. - ''') + Please use the :ref:`workflow_state xtrigger + ` instead. + '''): Conf('interval', VDR.V_INTERVAL, desc=''' Polling interval. ''') Conf('max-polls', VDR.V_INTEGER, desc=''' - The maximum number of polls before timing out and entering - the "failed" state. + Maximum number of polls to attempt before the task fails. ''') Conf('message', VDR.V_STRING, desc=''' - Wait for the task in the target workflow to receive a - specified message rather than achieve a state. + Target task output (task message, not trigger name). ''') - Conf('run-dir', VDR.V_STRING, desc=''' - Specify the location of the top level cylc-run directory - for the other workflow. - - For your own workflows, there is no need to set this as it - is always ``~/cylc-run/``. 
But for other workflows, - (e.g those owned by others), or mirrored workflow databases - use this item to specify the location of the top level - cylc run directory (the database should be in a the same - place relative to this location for each workflow). + Conf('alt-cylc-run-dir', VDR.V_STRING, desc=''' + The cylc-run directory location of the target workflow. + Use to poll workflows owned by other users. ''') Conf('verbose mode', VDR.V_BOOLEAN, desc=''' - Run the polling ``cylc workflow-state`` command in verbose - output mode. + Run the ``cylc workflow-state`` command in verbose mode. ''') with Conf('environment', desc=''' @@ -1958,9 +1936,10 @@ def upg(cfg, descr): """ u = upgrader(cfg, descr) + u.obsolete( - '7.8.0', - ['runtime', '__MANY__', 'suite state polling', 'template']) + '7.8.0', ['runtime', '__MANY__', 'suite state polling', 'template'] + ) u.obsolete('7.8.1', ['cylc', 'events', 'reset timer']) u.obsolete('7.8.1', ['cylc', 'events', 'reset inactivity timer']) u.obsolete('8.0.0', ['cylc', 'force run mode']) @@ -1996,6 +1975,25 @@ def upg(cfg, descr): ['cylc', 'mail', 'task event batch interval'], silent=cylc.flow.flags.cylc7_back_compat, ) + u.deprecate( + '8.0.0', + ['runtime', '__MANY__', 'suite state polling'], + ['runtime', '__MANY__', 'workflow state polling'], + silent=cylc.flow.flags.cylc7_back_compat, + is_section=True, + ) + u.obsolete( + '8.0.0', ['runtime', '__MANY__', 'workflow state polling', 'host']) + u.obsolete( + '8.0.0', ['runtime', '__MANY__', 'workflow state polling', 'user']) + + u.deprecate( + '8.3.0', + ['runtime', '__MANY__', 'workflow state polling', 'run-dir'], + ['runtime', '__MANY__', 'workflow state polling', 'alt-cylc-run-dir'], + silent=cylc.flow.flags.cylc7_back_compat, + ) + u.deprecate( '8.0.0', ['cylc', 'parameters'], @@ -2063,14 +2061,6 @@ def upg(cfg, descr): silent=cylc.flow.flags.cylc7_back_compat, ) - u.deprecate( - '8.0.0', - ['runtime', '__MANY__', 'suite state polling'], - ['runtime', '__MANY__', 'workflow 
state polling'], - silent=cylc.flow.flags.cylc7_back_compat, - is_section=True - ) - for job_setting in [ 'execution polling intervals', 'execution retry delays', @@ -2196,7 +2186,7 @@ def upgrade_graph_section(cfg: Dict[str, Any], descr: str) -> None: keys.add(key) if keys and not cylc.flow.flags.cylc7_back_compat: msg = ( - 'deprecated graph items were automatically upgraded ' + 'graph items were automatically upgraded ' f'in "{descr}":\n' f' * (8.0.0) {msg_old} -> {msg_new}' ) diff --git a/cylc/flow/command_polling.py b/cylc/flow/command_polling.py index dcf186edbd9..1c70e7c59a9 100644 --- a/cylc/flow/command_polling.py +++ b/cylc/flow/command_polling.py @@ -17,6 +17,7 @@ import sys from time import sleep +from cylc.flow import LOG class Poller: @@ -25,39 +26,30 @@ class Poller: @classmethod def add_to_cmd_options(cls, parser, d_interval=60, d_max_polls=10): - """Add command line options for commands that can do polling""" + """Add command line options for commands that can do polling.""" parser.add_option( "--max-polls", help=r"Maximum number of polls (default: %default).", + type="int", metavar="INT", action="store", dest="max_polls", - default=d_max_polls) + default=d_max_polls + ) parser.add_option( "--interval", help=r"Polling interval in seconds (default: %default).", + type="int", metavar="SECS", action="store", dest="interval", - default=d_interval) + default=d_interval + ) def __init__(self, condition, interval, max_polls, args): - self.condition = condition # e.g. 
"workflow stopped" - - # check max_polls is an int - try: - self.max_polls = int(max_polls) - except ValueError: - sys.exit("max_polls must be an int") - - # check interval is an int - try: - self.interval = int(interval) - except ValueError: - sys.exit("interval must be an integer") - - self.n_polls = 0 + self.interval = interval + self.max_polls = max_polls or 1 # no point in zero polls self.args = args # any extra parameters needed by check() async def check(self): @@ -66,29 +58,28 @@ async def check(self): async def poll(self): """Poll for the condition embodied by self.check(). - Return True if condition met, or False if polling exhausted.""" - if self.max_polls == 0: - # exit 1 as we can't know if the condition is satisfied - sys.exit("WARNING: nothing to do (--max-polls=0)") - elif self.max_polls == 1: - sys.stdout.write("checking for '%s'" % self.condition) - else: - sys.stdout.write("polling for '%s'" % self.condition) + Return True if condition met, or False if polling exhausted. 
+ + """ + n_polls = 0 + result = False + + while True: + n_polls += 1 + result = await self.check() + if self.max_polls != 1: + sys.stderr.write(".") + sys.stderr.flush() + if result or n_polls >= self.max_polls: + if self.max_polls != 1: + sys.stderr.write("\n") + sys.stderr.flush() + break + sleep(self.interval) - while self.n_polls < self.max_polls: - self.n_polls += 1 - if await self.check(): - sys.stdout.write(": satisfied\n") - return True - if self.max_polls > 1: - sys.stdout.write(".") - sleep(self.interval) - sys.stdout.write("\n") - if self.max_polls > 1: - sys.stderr.write( - "ERROR: condition not satisfied after %d polls\n" % - self.max_polls) + if result: + return True else: - sys.stderr.write("ERROR: condition not satisfied\n") - return False + LOG.error(f"failed after {n_polls} polls") + return False diff --git a/cylc/flow/config.py b/cylc/flow/config.py index 096d83d69ad..5b7738f3e6c 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -115,7 +115,7 @@ check_deprecation, ) from cylc.flow.workflow_status import RunMode -from cylc.flow.xtrigger_mgr import XtriggerManager +from cylc.flow.xtrigger_mgr import XtriggerCollator if TYPE_CHECKING: from optparse import Values @@ -220,7 +220,6 @@ def __init__( options: 'Values', template_vars: Optional[Mapping[str, Any]] = None, output_fname: Optional[str] = None, - xtrigger_mgr: Optional[XtriggerManager] = None, mem_log_func: Optional[Callable[[str], None]] = None, run_dir: Optional[str] = None, log_dir: Optional[str] = None, @@ -262,7 +261,7 @@ def __init__( self.taskdefs: Dict[str, TaskDef] = {} self.expiration_offsets = {} self.ext_triggers = {} # Old external triggers (client/server) - self.xtrigger_mgr = xtrigger_mgr + self.xtrigger_collator = XtriggerCollator() self.workflow_polling_tasks = {} # type: ignore # TODO figure out type self.initial_point: 'PointBase' @@ -1513,6 +1512,19 @@ def adopt_orphans(self, orphans): self.runtime['linearized ancestors'][orphan] = [orphan, 'root'] def 
configure_workflow_state_polling_tasks(self): + + # Deprecation warning - automatic workflow state polling tasks don't + # necessarily have deprecated config items outside the graph string. + if ( + self.workflow_polling_tasks and + getattr(self.options, 'is_validate', False) + ): + LOG.warning( + "Workflow state polling tasks are deprecated." + " Please convert to workflow_state xtriggers:\n * " + + "\n * ".join(self.workflow_polling_tasks) + ) + # Check custom script not defined for automatic workflow polling tasks. for l_task in self.workflow_polling_tasks: try: @@ -1531,25 +1543,42 @@ def configure_workflow_state_polling_tasks(self): continue rtc = tdef.rtconfig comstr = ( - "cylc workflow-state" - f" --task={tdef.workflow_polling_cfg['task']}" - " --point=$CYLC_TASK_CYCLE_POINT" + "cylc workflow-state " + f"{tdef.workflow_polling_cfg['workflow']}//" + "$CYLC_TASK_CYCLE_POINT/" + f"{tdef.workflow_polling_cfg['task']}" ) + graph_selector = tdef.workflow_polling_cfg['status'] + config_message = rtc['workflow state polling']['message'] + if ( + graph_selector is not None and + ( + config_message is not None + ) and ( + graph_selector != config_message + ) + ): + raise WorkflowConfigError( + f'Polling task "{name}" must configure a target status or' + f' output message in the graph (:{graph_selector}) or task' + f' definition (message = "{config_message}") but not both.' 
+ ) + if graph_selector is not None: + comstr += f":{graph_selector}" + elif config_message is not None: + # quote: may contain spaces + comstr += f':"{config_message}" --messages' + else: + # default to :succeeded + comstr += f":{TASK_OUTPUT_SUCCEEDED}" + for key, fmt in [ - ('user', ' --%s=%s'), - ('host', ' --%s=%s'), ('interval', ' --%s=%d'), ('max-polls', ' --%s=%s'), - ('run-dir', ' --%s=%s')]: + ('alt-cylc-run-dir', ' --%s=%s')]: if rtc['workflow state polling'][key]: comstr += fmt % (key, rtc['workflow state polling'][key]) - if rtc['workflow state polling']['message']: - comstr += " --message='%s'" % ( - rtc['workflow state polling']['message']) - else: - comstr += " --status=" + tdef.workflow_polling_cfg['status'] - comstr += " " + tdef.workflow_polling_cfg['workflow'] - script = "echo " + comstr + "\n" + comstr + script = f"echo {comstr}\n{comstr}" rtc['script'] = script def get_parent_lists(self): @@ -1905,10 +1934,9 @@ def generate_triggers(self, lexpression, left_nodes, right, seq, f'Invalid xtrigger name "{label}" - {msg}' ) - if self.xtrigger_mgr is not None: - self.xtrigger_mgr.sequential_xtriggers_default = ( - self.cfg['scheduling']['sequential xtriggers'] - ) + self.xtrigger_collator.sequential_xtriggers_default = ( + self.cfg['scheduling']['sequential xtriggers'] + ) for label in xtrig_labels: try: xtrig = xtrigs[label] @@ -1928,13 +1956,7 @@ def generate_triggers(self, lexpression, left_nodes, right, seq, f" {label} = {xtrig.get_signature()}" ) - # Generic xtrigger validation. - XtriggerManager.check_xtrigger(label, xtrig, self.fdir) - - if self.xtrigger_mgr: - # (not available during validation) - self.xtrigger_mgr.add_trig(label, xtrig, self.fdir) - + self.xtrigger_collator.add_trig(label, xtrig, self.fdir) self.taskdefs[right].add_xtrig_label(label, seq) def get_actual_first_point(self, start_point): @@ -2624,10 +2646,7 @@ def upgrade_clock_triggers(self): # Define the xtrigger function. 
args = [] if offset is None else [offset] xtrig = SubFuncContext(label, 'wall_clock', args, {}) - if self.xtrigger_mgr is None: - XtriggerManager.check_xtrigger(label, xtrig, self.fdir) - else: - self.xtrigger_mgr.add_trig(label, xtrig, self.fdir) + self.xtrigger_collator.add_trig(label, xtrig, self.fdir) # Add it to the task, for each sequence that the task appears in. taskdef = self.get_taskdef(task_name) for seq in taskdef.sequences: diff --git a/cylc/flow/cycling/util.py b/cylc/flow/cycling/util.py index 7f22d43a600..b47a8aae886 100644 --- a/cylc/flow/cycling/util.py +++ b/cylc/flow/cycling/util.py @@ -18,14 +18,17 @@ from metomi.isodatetime.parsers import TimePointParser, DurationParser -def add_offset(cycle_point, offset): +def add_offset(cycle_point, offset, dmp_fmt=None): """Add a (positive or negative) offset to a cycle point. Return the result. """ my_parser = TimePointParser() - my_target_point = my_parser.parse(cycle_point, dump_as_parsed=True) + if dmp_fmt is None: + my_target_point = my_parser.parse(cycle_point, dump_as_parsed=True) + else: + my_target_point = my_parser.parse(cycle_point, dump_format=dmp_fmt) my_offset_parser = DurationParser() oper = "+" diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index 9b3b39509a2..c59fa7b6c62 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -101,8 +101,8 @@ from cylc.flow.taskdef import generate_graph_parents, generate_graph_children from cylc.flow.task_state import TASK_STATUSES_FINAL from cylc.flow.util import ( - serialise, - deserialise + serialise_set, + deserialise_set ) from cylc.flow.wallclock import ( TIME_ZONE_LOCAL_INFO, @@ -1186,7 +1186,7 @@ def generate_ghost_task( submit_num=0, data_mode=True, sequential_xtrigger_labels=( - self.schd.xtrigger_mgr.sequential_xtrigger_labels + self.schd.xtrigger_mgr.xtriggers.sequential_xtrigger_labels ), ) @@ -1411,7 +1411,7 @@ def apply_task_proxy_db_history(self): relative_id = tokens.relative_id itask, 
is_parent = self.db_load_task_proxies[relative_id] itask.submit_num = submit_num - flow_nums = deserialise(flow_nums_str) + flow_nums = deserialise_set(flow_nums_str) # Do not set states and outputs for future tasks in flow. if ( itask.flow_nums and @@ -1487,7 +1487,7 @@ def _process_internal_task_proxy( update_time = time() tproxy.state = itask.state.status - tproxy.flow_nums = serialise(itask.flow_nums) + tproxy.flow_nums = serialise_set(itask.flow_nums) prereq_list = [] for prereq in itask.state.prerequisites: @@ -1778,7 +1778,7 @@ def window_resize_rewalk(self) -> None: self.increment_graph_window( tokens, get_point(tokens['cycle']), - deserialise(tproxy.flow_nums) + deserialise_set(tproxy.flow_nums) ) # Flag difference between old and new window for pruning. self.prune_flagged_nodes.update( diff --git a/cylc/flow/dbstatecheck.py b/cylc/flow/dbstatecheck.py index ca45b5deba6..1fae3e0feb5 100644 --- a/cylc/flow/dbstatecheck.py +++ b/cylc/flow/dbstatecheck.py @@ -19,42 +19,48 @@ import os import sqlite3 import sys +from contextlib import suppress +from typing import Dict, Iterable, Optional, List, Union +from cylc.flow.exceptions import InputError +from cylc.flow.cycling.util import add_offset +from cylc.flow.cycling.integer import ( + IntegerPoint, + IntegerInterval +) +from cylc.flow.flow_mgr import stringify_flow_nums from cylc.flow.pathutil import expand_path from cylc.flow.rundb import CylcWorkflowDAO -from cylc.flow.task_state import ( - TASK_STATUS_SUBMITTED, - TASK_STATUS_RUNNING, - TASK_STATUS_SUCCEEDED, - TASK_STATUS_FAILED +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, + TASK_OUTPUT_FINISHED, +) +from cylc.flow.util import deserialise_set +from metomi.isodatetime.parsers import TimePointParser +from metomi.isodatetime.exceptions import ISO8601SyntaxError + + +output_fallback_msg = ( + "Unable to filter by task output label for tasks run in Cylc versions " + "between 8.0.0-8.3.0. 
Falling back to filtering by task message instead." ) class CylcWorkflowDBChecker: - """Object for querying a workflow database""" - STATE_ALIASES = { - 'finish': [ - TASK_STATUS_FAILED, - TASK_STATUS_SUCCEEDED - ], - 'start': [ - TASK_STATUS_RUNNING, - TASK_STATUS_SUCCEEDED, - TASK_STATUS_FAILED - ], - 'submit': [ - TASK_STATUS_SUBMITTED, - TASK_STATUS_RUNNING, - TASK_STATUS_SUCCEEDED, - TASK_STATUS_FAILED - ], - 'fail': [ - TASK_STATUS_FAILED - ], - 'succeed': [ - TASK_STATUS_SUCCEEDED - ], - } + """Object for querying task status or outputs from a workflow database. + + Back-compat and task outputs: + # Cylc 7 stored {trigger: message} for custom outputs only. + 1|foo|{"x": "the quick brown"} + + # Cylc 8 (pre-8.3.0) stored [message] only, for all outputs. + 1|foo|[1]|["submitted", "started", "succeeded", "the quick brown"] + + # Cylc 8 (8.3.0+) stores {trigger: message} for all ouputs. + 1|foo|[1]|{"submitted": "submitted", "started": "started", + "succeeded": "succeeded", "x": "the quick brown"} + """ def __init__(self, rund, workflow, db_path=None): # (Explicit dp_path arg is to make testing easier). @@ -65,17 +71,93 @@ def __init__(self, rund, workflow, db_path=None): ) if not os.path.exists(db_path): raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), db_path) + self.conn = sqlite3.connect(db_path, timeout=10.0) + # Get workflow point format. + try: + self.db_point_fmt = self._get_db_point_format() + self.c7_back_compat_mode = False + except sqlite3.OperationalError as exc: + # BACK COMPAT: Cylc 7 DB (see method below). + try: + self.db_point_fmt = self._get_db_point_format_compat() + self.c7_back_compat_mode = True + except sqlite3.OperationalError: + raise exc # original error + + def adjust_point_to_db(self, cycle, offset): + """Adjust a cycle point (with offset) to the DB point format. 
+ + Cycle point queries have to match in the DB as string literals, + so we convert given cycle points (e.g., from the command line) + to the DB point format before making the query. + + """ + if cycle is None or "*" in cycle: + if offset is not None: + raise InputError( + f'Cycle point "{cycle}" is not compatible with an offset.' + ) + # Nothing to do + return cycle + + if offset is not None: + if self.db_point_fmt is None: + # integer cycling + cycle = str( + IntegerPoint(cycle) + + IntegerInterval(offset) + ) + else: + cycle = str( + add_offset(cycle, offset) + ) + + if self.db_point_fmt is None: + return cycle + + # Convert cycle point to DB format. + try: + cycle = str( + TimePointParser().parse( + cycle, dump_format=self.db_point_fmt + ) + ) + except ISO8601SyntaxError: + raise InputError( + f'Cycle point "{cycle}" is not compatible' + f' with DB point format "{self.db_point_fmt}"' + ) + return cycle + @staticmethod - def display_maps(res): + def display_maps(res, old_format=False, pretty_print=False): if not res: sys.stderr.write("INFO: No results to display.\n") else: for row in res: - sys.stdout.write((", ").join(row) + "\n") + if old_format: + sys.stdout.write(', '.join(row) + '\n') + else: + out = f"{row[1]}/{row[0]}:" # cycle/task: + status_or_outputs = row[2] + if pretty_print: + with suppress(json.decoder.JSONDecodeError): + status_or_outputs = ( + json.dumps( + json.loads( + status_or_outputs.replace("'", '"') + ), + indent=4 + ) + ) + out += status_or_outputs + if len(row) == 4: + out += row[3] # flow + sys.stdout.write(out + "\n") - def get_remote_point_format(self): + def _get_db_point_format(self): """Query a workflow database for a 'cycle point format' entry""" for row in self.conn.execute( rf''' @@ -90,11 +172,16 @@ def get_remote_point_format(self): ): return row[0] - def get_remote_point_format_compat(self): - """Query a Cylc 7 suite database for a 'cycle point format' entry. 
- - Back compat for Cylc 8 workflow state triggers targeting Cylc 7 DBs. - """ + def _get_db_point_format_compat(self): + """Query a Cylc 7 suite database for 'cycle point format'.""" + # BACK COMPAT: Cylc 7 DB + # Workflows parameters table name change. + # from: + # 8.0.x + # to: + # 8.1.x + # remove at: + # 8.x for row in self.conn.execute( rf''' SELECT @@ -108,27 +195,56 @@ def get_remote_point_format_compat(self): ): return row[0] - def state_lookup(self, state): - """allows for multiple states to be searched via a status alias""" - if state in self.STATE_ALIASES: - return self.STATE_ALIASES[state] - else: - return [state] - def workflow_state_query( - self, task, cycle, status=None, message=None, mask=None): - """run a query on the workflow database""" + self, + task: Optional[str] = None, + cycle: Optional[str] = None, + selector: Optional[str] = None, + is_trigger: Optional[bool] = False, + is_message: Optional[bool] = False, + flow_num: Optional[int] = None, + print_outputs: bool = False + ) -> List[List[str]]: + """Query task status or outputs (by trigger or message) in a database. + + Args: + task: + task name + cycle: + cycle point + selector: + task status, trigger name, or message + is_trigger: + intpret the selector as a trigger + is_message: + interpret the selector as a task message + + Return: + A list of results for all tasks that match the query. + [ + [name, cycle, result, [flow]], + ... 
+ ] + + "result" is single string: + - for status queries: the task status + - for output queries: a serialized dict of completed outputs + {trigger: message} + + """ stmt_args = [] stmt_wheres = [] - if mask is None: - mask = "name, cycle, status" - - if message: + if is_trigger or is_message: target_table = CylcWorkflowDAO.TABLE_TASK_OUTPUTS - mask = "outputs" + mask = "name, cycle, outputs" else: target_table = CylcWorkflowDAO.TABLE_TASK_STATES + mask = "name, cycle, status" + + if not self.c7_back_compat_mode: + # Cylc 8 DBs only + mask += ", flow_nums" stmt = rf''' SELECT @@ -138,49 +254,103 @@ def workflow_state_query( ''' # nosec # * mask is hardcoded # * target_table is a code constant - if task is not None: - stmt_wheres.append("name==?") + + # Select from DB by name, cycle, status. + # (Outputs and flow_nums are serialised). + if task: + if '*' in task: + # Replace Cylc ID wildcard with Sqlite query wildcard. + task = task.replace('*', '%') + stmt_wheres.append("name like ?") + else: + stmt_wheres.append("name==?") stmt_args.append(task) - if cycle is not None: - stmt_wheres.append("cycle==?") + + if cycle: + if '*' in cycle: + # Replace Cylc ID wildcard with Sqlite query wildcard. + cycle = cycle.replace('*', '%') + stmt_wheres.append("cycle like ?") + else: + stmt_wheres.append("cycle==?") stmt_args.append(cycle) - if status: - stmt_frags = [] - for state in self.state_lookup(status): - stmt_args.append(state) - stmt_frags.append("status==?") - stmt_wheres.append("(" + (" OR ").join(stmt_frags) + ")") + if ( + selector is not None + and target_table == CylcWorkflowDAO.TABLE_TASK_STATES + ): + # Can select by status in the DB but not outputs. 
+ stmt_wheres.append("status==?") + stmt_args.append(selector) + if stmt_wheres: - stmt += " where " + (" AND ").join(stmt_wheres) + stmt += "WHERE\n " + (" AND ").join(stmt_wheres) + + if target_table == CylcWorkflowDAO.TABLE_TASK_STATES: + # (outputs table doesn't record submit number) + stmt += r"ORDER BY submit_num" - res = [] + # Query the DB and drop incompatible rows. + db_res = [] for row in self.conn.execute(stmt, stmt_args): - if not all(v is None for v in row): - res.append(list(row)) - - return res - - def task_state_getter(self, task, cycle): - """used to get the state of a particular task at a particular cycle""" - return self.workflow_state_query(task, cycle, mask="status")[0] - - def task_state_met(self, task, cycle, status=None, message=None): - """used to check if a task is in a particular state""" - res = self.workflow_state_query(task, cycle, status, message) - if status: - return bool(res) - elif message: - return any( - message == value - for outputs_str, in res - for value in json.loads(outputs_str) - ) + # name, cycle, status_or_outputs, [flow_nums] + res = list(row[:3]) + if row[2] is None: + # status can be None in Cylc 7 DBs + continue + if not self.c7_back_compat_mode: + flow_nums = deserialise_set(row[3]) + if flow_num is not None and flow_num not in flow_nums: + # skip result, wrong flow + continue + fstr = stringify_flow_nums(flow_nums) + if fstr: + res.append(fstr) + db_res.append(res) + + if target_table == CylcWorkflowDAO.TABLE_TASK_STATES: + return db_res + + warn_output_fallback = is_trigger + results = [] + for row in db_res: + outputs: Union[Dict[str, str], List[str]] = json.loads(row[2]) + if isinstance(outputs, dict): + messages: Iterable[str] = outputs.values() + else: + # Cylc 8 pre 8.3.0 back-compat: list of output messages + messages = outputs + if warn_output_fallback: + print(f"WARNING - {output_fallback_msg}", file=sys.stderr) + warn_output_fallback = False + if ( + selector is None or + (is_message and selector in 
messages) or + (is_trigger and self._selector_in_outputs(selector, outputs)) + ): + results.append(row[:2] + [str(outputs)] + row[3:]) + + return results @staticmethod - def validate_mask(mask): - fieldnames = ["name", "status", "cycle"] # extract from rundb.py? - return all( - term.strip(' ') in fieldnames - for term in mask.split(',') + def _selector_in_outputs(selector: str, outputs: Iterable[str]) -> bool: + """Check if a selector, including "finished", is in the outputs. + + Examples: + >>> this = CylcWorkflowDBChecker._selector_in_outputs + >>> this('moop', ['started', 'moop']) + True + >>> this('moop', ['started']) + False + >>> this('finished', ['succeeded']) + True + >>> this('finish', ['failed']) + True + """ + return selector in outputs or ( + selector in (TASK_OUTPUT_FINISHED, "finish") + and ( + TASK_OUTPUT_SUCCEEDED in outputs + or TASK_OUTPUT_FAILED in outputs + ) ) diff --git a/cylc/flow/etc/examples/event-driven-cycling/.validate b/cylc/flow/etc/examples/event-driven-cycling/.validate index ef224cd9e2c..54e4db0a74c 100755 --- a/cylc/flow/etc/examples/event-driven-cycling/.validate +++ b/cylc/flow/etc/examples/event-driven-cycling/.validate @@ -27,12 +27,7 @@ sleep 1 # give it a reasonable chance to start up ./bin/trigger "$ID" WORLD=earth # wait for it to complete -cylc workflow-state "$ID" \ - --task=run \ - --point=1 \ - --status=succeeded \ - --max-polls=60 \ - --interval=1 +cylc workflow-state "$ID//1/run:succeeded" --max-polls=60 --interval=1 # check the job received the environment variable we provided grep 'Hello earth' "$HOME/cylc-run/$ID/log/job/1/run/NN/job.out" diff --git a/cylc/flow/etc/examples/inter-workflow-triggers/.validate b/cylc/flow/etc/examples/inter-workflow-triggers/.validate index bdd414e275d..ef21cb95ae4 100755 --- a/cylc/flow/etc/examples/inter-workflow-triggers/.validate +++ b/cylc/flow/etc/examples/inter-workflow-triggers/.validate @@ -37,12 +37,7 @@ cylc vip \ ./downstream # wait for the first task in the downstream to 
succeed -cylc workflow-state "$DOID" \ - --task=process \ - --point="$ICP" \ - --status=succeeded \ - --max-polls=60 \ - --interval=1 +cylc workflow-state "$DOID//$ICP/process:succeeded" --max-polls=60 --interval=1 # stop the workflows cylc stop --kill --max-polls=10 --interval=2 "$UPID" diff --git a/cylc/flow/etc/examples/inter-workflow-triggers/downstream/flow.cylc b/cylc/flow/etc/examples/inter-workflow-triggers/downstream/flow.cylc index 115ecd94755..45d88eb61b5 100644 --- a/cylc/flow/etc/examples/inter-workflow-triggers/downstream/flow.cylc +++ b/cylc/flow/etc/examples/inter-workflow-triggers/downstream/flow.cylc @@ -10,7 +10,7 @@ [[xtriggers]] # this is an "xtrigger" - it will wait for the task "b" in the same # cycle from the workflow "upstream" - upstream = workflow_state(workflow="inter-workflow-triggers/upstream", task="b", point="%(point)s") + upstream = workflow_state(workflow_task_id="inter-workflow-triggers/upstream//%(point)s/b") [[graph]] PT1H = """ @upstream => process diff --git a/cylc/flow/exceptions.py b/cylc/flow/exceptions.py index 7235455cace..802cfaaa9cd 100644 --- a/cylc/flow/exceptions.py +++ b/cylc/flow/exceptions.py @@ -242,12 +242,13 @@ class XtriggerConfigError(WorkflowConfigError): """ - def __init__(self, label: str, message: str): - self.label: str = label - self.message: str = message + def __init__(self, label: str, func: str, message: Union[str, Exception]): + self.label = label + self.func = func + self.message = message def __str__(self) -> str: - return f'[@{self.label}] {self.message}' + return f'[@{self.label}] {self.func}\n{self.message}' class ClientError(CylcError): diff --git a/cylc/flow/graph_parser.py b/cylc/flow/graph_parser.py index 64dcdecaf6f..efbce16fb36 100644 --- a/cylc/flow/graph_parser.py +++ b/cylc/flow/graph_parser.py @@ -380,11 +380,10 @@ def parse_graph(self, graph_string: str) -> None: full_line = self.__class__.REC_WORKFLOW_STATE.sub(repl, full_line) for item in repl.match_groups: l_task, r_all, 
r_workflow, r_task, r_status = item - if r_status: - r_status = r_status.strip(self.__class__.QUALIFIER) - r_status = TaskTrigger.standardise_name(r_status) - else: - r_status = TASK_OUTPUT_SUCCEEDED + if r_status is not None: + r_status = TaskTrigger.standardise_name( + r_status.strip(self.__class__.QUALIFIER) + ) self.workflow_state_polling_tasks[l_task] = ( r_workflow, r_task, r_status, r_all ) diff --git a/cylc/flow/option_parsers.py b/cylc/flow/option_parsers.py index f058a684f77..a4ef2d97b3d 100644 --- a/cylc/flow/option_parsers.py +++ b/cylc/flow/option_parsers.py @@ -51,6 +51,7 @@ OPT_WORKFLOW_ID_ARG_DOC = ('[WORKFLOW]', 'Workflow ID') WORKFLOW_ID_MULTI_ARG_DOC = ('WORKFLOW ...', 'Workflow ID(s)') WORKFLOW_ID_OR_PATH_ARG_DOC = ('WORKFLOW | PATH', 'Workflow ID or path') +ID_SEL_ARG_DOC = ('ID[:sel]', 'WORKFLOW-ID[[//CYCLE[/TASK]]:selector]') ID_MULTI_ARG_DOC = ('ID ...', 'Workflow/Cycle/Family/Task ID(s)') FULL_ID_MULTI_ARG_DOC = ('ID ...', 'Cycle/Family/Task ID(s)') @@ -290,9 +291,15 @@ class CylcOptionParser(OptionParser): ['--debug'], help='Equivalent to -v -v', dest='verbosity', action='store_const', const=2, useif='all'), OptionSettings( - ['--no-timestamp'], help='Don\'t timestamp logged messages.', - action='store_false', dest='log_timestamp', - default=True, useif='all'), + ['--timestamp'], + help='Add a timestamp to messages logged to the terminal.', + action='store_true', dest='log_timestamp', + default=False, useif='all'), + OptionSettings( + ['--no-timestamp'], help="Don't add a timestamp to messages logged" + " to the terminal (this does nothing - it is now the default.", + action='store_false', dest='_noop', + default=False, useif='all'), OptionSettings( ['--color', '--colour'], metavar='WHEN', action='store', default='auto', choices=['never', 'auto', 'always'], diff --git a/cylc/flow/parsec/upgrade.py b/cylc/flow/parsec/upgrade.py index 62f3e2e8d0d..0f75732f6d5 100644 --- a/cylc/flow/parsec/upgrade.py +++ b/cylc/flow/parsec/upgrade.py @@ 
-194,6 +194,8 @@ def expand(self, upg): def upgrade(self): warnings = OrderedDict() + deprecations = False + obsoletions = False for vn, upgs in self.upgrades.items(): for u in upgs: try: @@ -212,6 +214,9 @@ def upgrade(self): if upg['new']: msg += ' -> ' + self.show_keys(upg['new'], upg['is_section']) + deprecations = True + else: + obsoletions = True msg += " - " + upg['cvt'].describe().format( old=old, new=upg['cvt'].convert(old) @@ -236,7 +241,6 @@ def upgrade(self): self.put_item(upg['new'], upg['cvt'].convert(old)) if warnings: - level = WARNING if self.descr == self.SITE_CONFIG: # Site level configuration, user cannot easily fix. # Only log at debug level. @@ -245,9 +249,19 @@ def upgrade(self): # User level configuration, user should be able to fix. # Log at warning level. level = WARNING - LOG.log(level, - 'deprecated items were automatically upgraded in ' - f'"{self.descr}"') + if obsoletions: + LOG.log( + level, + "Obsolete config items were automatically deleted." + " Please check your workflow and remove them permanently." + ) + if deprecations: + LOG.log( + level, + "Deprecated config items were automatically upgraded." + " Please alter your workflow to use the new syntax." + ) + for vn, msgs in warnings.items(): for msg in msgs: LOG.log(level, ' * (%s) %s', vn, msg) diff --git a/cylc/flow/parsec/validate.py b/cylc/flow/parsec/validate.py index 29e2c8a59c9..18c19596a63 100644 --- a/cylc/flow/parsec/validate.py +++ b/cylc/flow/parsec/validate.py @@ -1136,7 +1136,7 @@ def coerce_xtrigger(cls, value, keys): @classmethod def _coerce_type(cls, value): - """Convert value to int, float, or bool, if possible. + """Convert value to int, float, bool, or None, if possible. 
Examples: >>> CylcConfigValidator._coerce_type('1') @@ -1147,6 +1147,7 @@ def _coerce_type(cls, value): True >>> CylcConfigValidator._coerce_type('abc') 'abc' + >>> CylcConfigValidator._coerce_type('None') """ try: @@ -1159,6 +1160,8 @@ def _coerce_type(cls, value): val = False elif value == 'True': val = True + elif value == 'None': + val = None else: # Leave as string. val = cls.strip_and_unquote([], value) diff --git a/cylc/flow/rundb.py b/cylc/flow/rundb.py index 78e1813d45b..70bca7c8354 100644 --- a/cylc/flow/rundb.py +++ b/cylc/flow/rundb.py @@ -23,6 +23,7 @@ import traceback from typing import ( TYPE_CHECKING, + Dict, Iterable, List, Set, @@ -33,11 +34,12 @@ from cylc.flow import LOG from cylc.flow.exceptions import PlatformLookupError -from cylc.flow.util import deserialise +from cylc.flow.util import deserialise_set import cylc.flow.flags if TYPE_CHECKING: from pathlib import Path + from cylc.flow.flow_mgr import FlowNums @dataclass @@ -790,7 +792,7 @@ def select_prev_instances( ( submit_num, flow_wait == 1, - deserialise(flow_nums_str), + deserialise_set(flow_nums_str), status ) for flow_nums_str, submit_num, flow_wait, status in ( @@ -804,12 +806,14 @@ def select_latest_flow_nums(self): SELECT flow_nums, MAX(time_created) FROM {self.TABLE_TASK_STATES} ''' # nosec (table name is code constant) flow_nums_str = list(self.connect().execute(stmt))[0][0] - return deserialise(flow_nums_str) + return deserialise_set(flow_nums_str) - def select_task_outputs(self, name, point): + def select_task_outputs( + self, name: str, point: str + ) -> 'Dict[str, FlowNums]': """Select task outputs for each flow. - Return: {outputs_list: flow_nums_set} + Return: {outputs_dict_str: flow_nums_set} """ stmt = rf''' @@ -820,10 +824,12 @@ def select_task_outputs(self, name, point): WHERE name==? AND cycle==? 
''' # nosec (table name is code constant) - ret = {} - for flow_nums, outputs in self.connect().execute(stmt, (name, point,)): - ret[outputs] = deserialise(flow_nums) - return ret + return { + outputs: deserialise_set(flow_nums) + for flow_nums, outputs in self.connect().execute( + stmt, (name, point,) + ) + } def select_xtriggers_for_restart(self, callback): stmt = rf''' diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 8074bfbce8a..ff593648e7b 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -1057,7 +1057,6 @@ def load_flow_file(self, is_reload=False): self.flow_file, self.options, self.template_vars, - xtrigger_mgr=self.xtrigger_mgr, mem_log_func=self.profiler.log_memory, output_fname=os.path.join( self.workflow_run_dir, 'log', 'config', diff --git a/cylc/flow/scripts/function_run.py b/cylc/flow/scripts/function_run.py index 63029a97782..f517963d708 100755 --- a/cylc/flow/scripts/function_run.py +++ b/cylc/flow/scripts/function_run.py @@ -14,13 +14,12 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""USAGE: cylc function-run +"""USAGE: cylc function-run (This command is for internal use.) Run a Python xtrigger function "(*args, **kwargs)" in the process pool. -It must be in a module of the same name. Positional and keyword arguments must -be passed in as JSON strings. +Positional and keyword arguments must be passed in as JSON strings. Python entry points are the preferred way to make xtriggers available to the scheduler, but local xtriggers can be stored in . 
@@ -38,7 +37,7 @@ def main(*api_args): args = [None] + list(api_args) else: args = sys.argv - if args[1] in ["help", "--help"] or len(args) != 5: + if args[1] in ["help", "--help"] or len(args) != 6: print(__doc__) sys.exit(0) - run_function(args[1], args[2], args[3], args[4]) + run_function(*args[1:]) diff --git a/cylc/flow/scripts/validate_install_play.py b/cylc/flow/scripts/validate_install_play.py index 9343289f987..d701eb02315 100644 --- a/cylc/flow/scripts/validate_install_play.py +++ b/cylc/flow/scripts/validate_install_play.py @@ -86,7 +86,7 @@ def get_option_parser() -> COP: # no sense in a VIP context. if option.kwargs.get('dest') != 'against_source': parser.add_option(*option.args, **option.kwargs) - + parser.set_defaults(is_validate=True) return parser @@ -103,6 +103,9 @@ def main(parser: COP, options: 'Values', workflow_id: Optional[str] = None): log_subcommand('validate', source) asyncio.run(cylc_validate(parser, options, str(source))) + # Unset is validate after validation. + delattr(options, 'is_validate') + log_subcommand('install', source) _, workflow_id = asyncio.run(cylc_install(options, workflow_id)) diff --git a/cylc/flow/scripts/validate_reinstall.py b/cylc/flow/scripts/validate_reinstall.py index 0733512ddad..de1be6dea82 100644 --- a/cylc/flow/scripts/validate_reinstall.py +++ b/cylc/flow/scripts/validate_reinstall.py @@ -97,6 +97,7 @@ def get_option_parser() -> COP: ) for option in VR_OPTIONS: parser.add_option(*option.args, **option.kwargs) + parser.set_defaults(is_validate=True) return parser @@ -169,6 +170,9 @@ async def vr_cli(parser: COP, options: 'Values', workflow_id: str): log_subcommand('validate --against-source', workflow_id) await cylc_validate(parser, options, workflow_id) + # Unset is validate after validation. 
+ delattr(options, 'is_validate') + log_subcommand('reinstall', workflow_id) reinstall_ok = await cylc_reinstall( options, workflow_id, diff --git a/cylc/flow/scripts/workflow_state.py b/cylc/flow/scripts/workflow_state.py index ea544771c2d..f6350110223 100755 --- a/cylc/flow/scripts/workflow_state.py +++ b/cylc/flow/scripts/workflow_state.py @@ -18,252 +18,434 @@ r"""cylc workflow-state [OPTIONS] ARGS -Retrieve task states from the workflow database. +Check or poll a workflow database for task statuses or completed outputs. -Print task states retrieved from a workflow database; or (with --task, ---point, and --status) poll until a given task reaches a given state; or (with ---task, --point, and --message) poll until a task receives a given message. -Polling is configurable with --interval and --max-polls; for a one-off -check use --max-polls=1. The workflow database does not need to exist at -the time polling commences but allocated polls are consumed waiting for -it (consider max-polls*interval as an overall timeout). +The ID argument can target a workflow, or a cycle point, or a specific +task, with an optional selector on cycle or task to match task status, +output trigger (if not a status, or with --trigger) or output message +(with --message). All matching results will be printed. -Note for non-cycling tasks --point=1 must be provided. +If no results match, the command will repeatedly check (poll) until a match +is found or polling is exhausted (see --max-polls and --interval). For a +one-off check set --max-polls=1. -For your own workflows the database location is determined by your -site/user config. For other workflows, e.g. those owned by others, or -mirrored workflow databases, use --run-dir=DIR to specify the location. +If the database does not exist at first, polls are consumed waiting for it +so you can start checking before the target workflow is started. 
+ +Legacy (pre-8.3.0) options are supported, but deprecated, for existing scripts: + cylc workflow-state --task=NAME --point=CYCLE --status=STATUS + --output=MESSAGE --message=MESSAGE --task-point WORKFLOW +(Note from 8.0 until 8.3.0 --output and --message both match task messages). + +In "cycle/task:selector" the selector will match task statuses, unless: + - if it is not a known status, it will match task output triggers + (Cylc 8 DB) or task ouput messages (Cylc 7 DB) + - with --triggers, it will only match task output triggers + - with --messages (deprecated), it will only match task output messages. + Triggers are more robust - they match manually and naturally set outputs. + +Selector does not default to "succeeded". If omitted, any status will match. + +The "finished" pseudo-output is an alias for "succeeded or failed". + +In the ID, both cycle and task can include "*" to match any sequence of zero +or more characters. Quote the pattern to protect it from shell expansion. + +Note tasks get recorded in the DB once they enter the active window (n=0). + +Flow numbers are only printed for flow numbers > 1. + +USE IN TASK SCRIPTING: + - To poll a task at the same cycle point in another workflow, just use + $CYLC_TASK_CYCLE_POINT in the ID. + - To poll a task at an offset cycle point, use the --offset option to + have Cylc do the datetime arithmetic for you. + - However, see also the workflow_state xtrigger for this use case. + +WARNINGS: + - Typos in the workflow or task ID will result in fruitless polling. + - To avoid missing transient states ("submitted", "running") poll for the + corresponding output trigger instead ("submitted", "started"). + - Cycle points are auto-converted to the DB point format (and UTC mode). + - Task outputs manually completed by "cylc set" have "(force-completed)" + recorded as the task message in the DB, so it is best to query trigger + names, not messages, unless specifically interested in forced outputs. 
Examples: - $ cylc workflow-state WORKFLOW_ID --task=TASK --point=POINT --status=STATUS - # returns 0 if TASK.POINT reaches STATUS before the maximum number of - # polls, otherwise returns 1. - - $ cylc workflow-state WORKFLOW_ID --task=TASK --point=POINT --status=STATUS \ - > --offset=PT6H - # adds 6 hours to the value of CYCLE for carrying out the polling operation. - - $ cylc workflow-state WORKFLOW_ID --task=TASK --status=STATUS --task-point - # uses CYLC_TASK_CYCLE_POINT environment variable as the value for the - # CYCLE to poll. This is useful when you want to use cylc workflow-state in a - # cylc task. + + # Print the status of all tasks in WORKFLOW: + $ cylc workflow-state WORKFLOW + + # Print the status of all tasks in cycle point 2033: + $ cylc workflow-state WORKFLOW//2033 + + # Print the status of all tasks named foo: + $ cylc workflow-state "WORKFLOW//*/foo" + + # Print all succeeded tasks: + $ cylc workflow-state "WORKFLOW//*/*:succeeded" + + # Print all tasks foo that completed output (trigger name) file1: + $ cylc workflow-state "WORKFLOW//*/foo:file1" + + # Print if task 2033/foo completed output (trigger name) file1: + $ cylc workflow-state WORKFLOW//2033/foo:file1 + +See also: + - the workflow_state xtrigger, for state polling within workflows + - "cylc dump -t", to query a scheduler for current statuses + - "cylc show", to query a scheduler for task prerequisites and outputs """ import asyncio import os import sqlite3 import sys -from time import sleep -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, List, Optional -from cylc.flow.exceptions import CylcError, InputError -import cylc.flow.flags +from cylc.flow.pathutil import get_cylc_run_dir +from cylc.flow.id import Tokens +from cylc.flow.exceptions import InputError from cylc.flow.option_parsers import ( - WORKFLOW_ID_ARG_DOC, + ID_SEL_ARG_DOC, CylcOptionParser as COP, ) -from cylc.flow.dbstatecheck import CylcWorkflowDBChecker +from cylc.flow import LOG from 
cylc.flow.command_polling import Poller -from cylc.flow.task_state import TASK_STATUSES_ORDERED +from cylc.flow.dbstatecheck import CylcWorkflowDBChecker from cylc.flow.terminal import cli_function -from cylc.flow.cycling.util import add_offset -from cylc.flow.pathutil import get_cylc_run_dir from cylc.flow.workflow_files import infer_latest_run_from_id - -from metomi.isodatetime.parsers import TimePointParser +from cylc.flow.task_state import TASK_STATUSES_ORDERED if TYPE_CHECKING: from optparse import Values +WILDCARD = "*" + +# polling defaults +MAX_POLLS = 12 +INTERVAL = 5 + +OPT_DEPR_MSG = "DEPRECATED, use ID" +OPT_DEPR_MSG1 = 'DEPRECATED, use "ID:STATUS"' +OPT_DEPR_MSG2 = 'DEPRECATED, use "ID:MSG"' + + +def unquote(s: str) -> str: + """Remove leading & trailing quotes from a string. + + Examples: + >>> unquote('"foo"') + 'foo' + >>> unquote("'foo'") + 'foo' + >>> unquote('foo') + 'foo' + >>> unquote("'tis a fine morning") + "'tis a fine morning" + """ + if ( + s.startswith('"') and s.endswith('"') + or s.startswith("'") and s.endswith("'") + ): + return s[1:-1] + return s + + class WorkflowPoller(Poller): - """A polling object that checks workflow state.""" - - def connect(self): - """Connect to the workflow db, polling if necessary in case the - workflow has not been started up yet.""" - - # Returns True if connected, otherwise (one-off failed to - # connect, or max number of polls exhausted) False - connected = False - - if cylc.flow.flags.verbosity > 0: - sys.stderr.write( - "connecting to workflow db for " + - self.args['run_dir'] + "/" + self.args['workflow_id']) - - # Attempt db connection even if no polls for condition are - # requested, as failure to connect is useful information. - max_polls = self.max_polls or 1 - # max_polls*interval is equivalent to a timeout, and we - # include time taken to connect to the run db in this... 
- while not connected: - self.n_polls += 1 + """An object that polls for task states or outputs in a workflow DB.""" + + def __init__( + self, + id_: str, + offset: Optional[str], + flow_num: Optional[int], + alt_cylc_run_dir: Optional[str], + default_status: Optional[str], + is_trigger: bool, + is_message: bool, + old_format: bool = False, + pretty_print: bool = False, + **kwargs + ): + self.id_ = id_ + self.offset = offset + self.flow_num = flow_num + self.alt_cylc_run_dir = alt_cylc_run_dir + self.old_format = old_format + self.pretty_print = pretty_print + + try: + tokens = Tokens(self.id_) + except ValueError as exc: + raise InputError(exc) + + self.workflow_id_raw = tokens.workflow_id + self.selector = ( + tokens["cycle_sel"] or + tokens["task_sel"] or + default_status + ) + if self.selector: + self.selector = unquote(self.selector) + self.cycle_raw = tokens["cycle"] + self.task = tokens["task"] + + self.workflow_id: Optional[str] = None + self.cycle: Optional[str] = None + self.result: Optional[List[List[str]]] = None + self._db_checker: Optional[CylcWorkflowDBChecker] = None + + self.is_message = is_message + if is_message: + self.is_trigger = False + else: + self.is_trigger = ( + is_trigger or + ( + self.selector is not None and + self.selector not in TASK_STATUSES_ORDERED + ) + ) + super().__init__(**kwargs) + + def _find_workflow(self) -> bool: + """Find workflow and infer run directory, return True if found.""" + try: + self.workflow_id = infer_latest_run_from_id( + self.workflow_id_raw, + self.alt_cylc_run_dir + ) + except InputError: + LOG.debug("Workflow not found") + return False + + if self.workflow_id != self.workflow_id_raw: + # Print inferred ID. + sys.stderr.write(f"Inferred workflow ID: {self.workflow_id}\n") + return True + + @property + def db_checker(self) -> Optional[CylcWorkflowDBChecker]: + """Connect to workflow DB if not already connected. + + Returns DB checker if connected. 
+ """ + if not self._db_checker: try: - self.checker = CylcWorkflowDBChecker( - self.args['run_dir'], self.args['workflow_id']) - connected = True - # ... but ensure at least one poll after connection: - self.n_polls -= 1 + self._db_checker = CylcWorkflowDBChecker( + get_cylc_run_dir(self.alt_cylc_run_dir), + self.workflow_id + ) except (OSError, sqlite3.Error): - if self.n_polls >= max_polls: - raise - if cylc.flow.flags.verbosity > 0: - sys.stderr.write('.') - sleep(self.interval) - if cylc.flow.flags.verbosity > 0: - sys.stderr.write('\n') - - if connected and self.args['cycle']: - try: - fmt = self.checker.get_remote_point_format() - except sqlite3.OperationalError as exc: - try: - fmt = self.checker.get_remote_point_format_compat() - except sqlite3.OperationalError: - raise exc # original error - if fmt: - my_parser = TimePointParser() - my_point = my_parser.parse(self.args['cycle'], dump_format=fmt) - self.args['cycle'] = str(my_point) - return connected, self.args['cycle'] - - async def check(self): - """Return True if desired workflow state achieved, else False""" - return self.checker.task_state_met( - self.args['task'], self.args['cycle'], - self.args['status'], self.args['message']) + LOG.debug("DB not connected") + return None + + return self._db_checker + + async def check(self) -> bool: + """Return True if requested state achieved, else False. + + Called once per poll by super() so only find and connect once. + + Store self.result for external access. + + """ + if self.workflow_id is None and not self._find_workflow(): + return False + + if self.db_checker is None: + return False + + if self.cycle is None: + # Adjust target cycle point to the DB format. 
+ self.cycle = self.db_checker.adjust_point_to_db( + self.cycle_raw, self.offset) + + self.result = self.db_checker.workflow_state_query( + self.task, self.cycle, self.selector, self.is_trigger, + self.is_message, self.flow_num + ) + if self.result: + # End the polling dot stream and print inferred runN workflow ID. + self.db_checker.display_maps( + self.result, self.old_format, self.pretty_print) + + return bool(self.result) def get_option_parser() -> COP: parser = COP( __doc__, - argdoc=[WORKFLOW_ID_ARG_DOC] + argdoc=[ID_SEL_ARG_DOC] ) + # --run-dir for pre-8.3.0 back-compat parser.add_option( - "-t", "--task", help="Specify a task to check the state of.", - action="store", dest="task", default=None) + "-d", "--alt-cylc-run-dir", "--run-dir", + help="Alternate cylc-run directory, e.g. for other users' workflows.", + metavar="DIR", action="store", dest="alt_cylc_run_dir", default=None) parser.add_option( - "-p", "--point", - help="Specify the cycle point to check task states for.", - action="store", dest="cycle", default=None) + "-s", "--offset", + help="Offset from ID cycle point as an ISO8601 duration for datetime" + " cycling (e.g. 'PT30M' for 30 minutes) or an integer interval for" + " integer cycling (e.g. 'P2'). This can be used in task job scripts" + " to poll offset cycle points without doing the cycle arithmetic" + " yourself - but see also the workflow_state xtrigger.", + action="store", dest="offset", metavar="DURATION", default=None) parser.add_option( - "-T", "--task-point", - help="Use the CYLC_TASK_CYCLE_POINT environment variable as the " - "cycle point to check task states for. " - "Shorthand for --point=$CYLC_TASK_CYCLE_POINT", - action="store_true", dest="use_task_point", default=False) + "--flow", + help="Flow number, for target tasks. By default, any flow.", + action="store", type="int", dest="flow_num", default=None) parser.add_option( - "-d", "--run-dir", - help="The top level cylc run directory if non-standard. 
The " - "database should be DIR/WORKFLOW_ID/log/db. Use to interrogate " - "workflows owned by others, etc.; see note above.", - metavar="DIR", action="store", dest="alt_run_dir", default=None) + "--triggers", + help="Task selector should match output triggers rather than status." + " (Note this is not needed for custom outputs).", + action="store_true", dest="is_trigger", default=False) parser.add_option( - "-s", "--offset", - help="Specify an offset to add to the targeted cycle point", - action="store", dest="offset", default=None) - - conds = ("Valid triggering conditions to check for include: '" + - ("', '").join( - sorted(CylcWorkflowDBChecker.STATE_ALIASES.keys())[:-1]) + - "' and '" + sorted( - CylcWorkflowDBChecker.STATE_ALIASES.keys())[-1] + "'. ") - states = ("Valid states to check for include: '" + - ("', '").join(TASK_STATUSES_ORDERED[:-1]) + - "' and '" + TASK_STATUSES_ORDERED[-1] + "'.") + "--messages", + help="Task selector should match output messages rather than status.", + action="store_true", dest="is_message", default=False) + + parser.add_option( + "--pretty", + help="Pretty-print outputs (the default is single-line output).", + action="store_true", dest="pretty_print", default=False) + + parser.add_option( + "--old-format", + help="Print results in legacy comma-separated format.", + action="store_true", dest="old_format", default=False) + + # Back-compat support for pre-8.3.0 command line options. + parser.add_option( + "-t", "--task", help=f"Task name. {OPT_DEPR_MSG}.", + metavar="NAME", + action="store", dest="depr_task", default=None) + + parser.add_option( + "-p", "--point", metavar="CYCLE", + help=f"Cycle point. {OPT_DEPR_MSG}.", + action="store", dest="depr_point", default=None) + + parser.add_option( + "-T", "--task-point", + help="Get cycle point from the environment variable" + " $CYLC_TASK_CYCLE_POINT (e.g. 
in task job scripts)", + action="store_true", dest="depr_env_point", default=False) parser.add_option( "-S", "--status", - help="Specify a particular status or triggering condition to " - f"check for. {conds}{states}", - action="store", dest="status", default=None) + metavar="STATUS", + help=f"Task status. {OPT_DEPR_MSG1}.", + action="store", dest="depr_status", default=None) + # Prior to 8.3.0 --output was just an alias for --message parser.add_option( "-O", "--output", "-m", "--message", - help="Check custom task output by message string or trigger string.", - action="store", dest="msg", default=None) - - WorkflowPoller.add_to_cmd_options(parser) + metavar="MSG", + help=f"Task output message. {OPT_DEPR_MSG2}.", + action="store", dest="depr_msg", default=None) + + WorkflowPoller.add_to_cmd_options( + parser, + d_interval=INTERVAL, + d_max_polls=MAX_POLLS + ) return parser @cli_function(get_option_parser, remove_opts=["--db"]) -def main(parser: COP, options: 'Values', workflow_id: str) -> None: - - if options.use_task_point and options.cycle: - raise InputError( - "cannot specify a cycle point and use environment variable") - - if options.use_task_point: - if "CYLC_TASK_CYCLE_POINT" not in os.environ: - raise InputError("CYLC_TASK_CYCLE_POINT is not defined") - options.cycle = os.environ["CYLC_TASK_CYCLE_POINT"] - - if options.offset and not options.cycle: - raise InputError( - "You must target a cycle point to use an offset") - - # Attempt to apply specified offset to the targeted cycle - if options.offset: - options.cycle = str(add_offset(options.cycle, options.offset)) - - # Exit if both task state and message are to being polled - if options.status and options.msg: - raise InputError("cannot poll both status and custom output") - - if options.msg and not options.task and not options.cycle: - raise InputError("need a taskname and cyclepoint") - - # Exit if an invalid status is requested - if (options.status and - options.status not in TASK_STATUSES_ORDERED and - 
options.status not in CylcWorkflowDBChecker.STATE_ALIASES): - raise InputError(f"invalid status '{options.status}'") - - workflow_id = infer_latest_run_from_id(workflow_id, options.alt_run_dir) - - pollargs = { - 'workflow_id': workflow_id, - 'run_dir': get_cylc_run_dir(alt_run_dir=options.alt_run_dir), - 'task': options.task, - 'cycle': options.cycle, - 'status': options.status, - 'message': options.msg, - } - - spoller = WorkflowPoller( - "requested state", - options.interval, - options.max_polls, - args=pollargs, +def main(parser: COP, options: 'Values', *ids: str) -> None: + + # Note it would be cleaner to use 'id_cli.parse_ids()' here to get the + # workflow ID and tokens, but that function infers run number and fails + # if the workflow is not installed yet. We want to be able to start polling + # before the workflow is installed, which makes it easier to get a set of + # interdependent workflows up and running, so runN inference is done inside + # the poller. TODO: consider using id_cli.parse_ids inside the poller. + # (Note this applies to polling tasks, which use the CLI, not xtriggers). + + id_ = ids[0].rstrip('/') # might get 'id/' due to autcomplete + + if any( + [ + options.depr_task, + options.depr_status, + options.depr_msg, # --message and --trigger + options.depr_point, + options.depr_env_point + ] + ): + depr_opts = ( + "--task, --status, --message, --output, --point, --task-point" + ) + + if id_ != Tokens(id_)["workflow"]: + raise InputError( + f"with deprecated {depr_opts}, the argument must be a" + " plain workflow ID (i.e. no cycle, task, or :selector)." 
+ ) + + if options.depr_status and options.depr_msg: + raise InputError("set --status or --message, not both.") + + if options.depr_env_point: + if options.depr_point: + raise InputError( + "set --task-point or --point=CYCLE, not both.") + try: + options.depr_point = os.environ["CYLC_TASK_CYCLE_POINT"] + except KeyError: + raise InputError( + "--task-point: $CYLC_TASK_CYCLE_POINT is not defined") + + if options.depr_point is not None: + id_ += f"//{options.depr_point}" + elif ( + options.depr_task is not None or + options.depr_status is not None or + options.depr_msg is not None + ): + id_ += "//*" + if options.depr_task is not None: + id_ += f"/{options.depr_task}" + if options.depr_status is not None: + id_ += f":{options.depr_status}" + elif options.depr_msg is not None: + id_ += f":{options.depr_msg}" + options.is_message = True + + msg = f"{depr_opts} are deprecated. Please use an ID: " + if not options.depr_env_point: + msg += id_ + else: + msg += id_.replace(options.depr_point, "$CYLC_TASK_CYCLE_POINT") + LOG.warning(msg) + + poller = WorkflowPoller( + id_, + options.offset, + options.flow_num, + options.alt_cylc_run_dir, + default_status=None, + is_trigger=options.is_trigger, + is_message=options.is_message, + old_format=options.old_format, + pretty_print=options.pretty_print, + condition=id_, + interval=options.interval, + max_polls=options.max_polls, + args=None ) - connected, formatted_pt = spoller.connect() - - if not connected: - raise CylcError(f"Cannot connect to the {workflow_id} DB") - - if options.status and options.task and options.cycle: - # check a task status - spoller.condition = options.status - if not asyncio.run(spoller.poll()): - sys.exit(1) - elif options.msg: - # Check for a custom task output - spoller.condition = "output: %s" % options.msg - if not asyncio.run(spoller.poll()): - sys.exit(1) - else: - # just display query results - spoller.checker.display_maps( - spoller.checker.workflow_state_query( - task=options.task, - 
cycle=formatted_pt, - status=options.status)) + if not asyncio.run( + poller.poll() + ): + sys.exit(1) diff --git a/cylc/flow/subprocctx.py b/cylc/flow/subprocctx.py index 45b4cb2095e..41b735a71c5 100644 --- a/cylc/flow/subprocctx.py +++ b/cylc/flow/subprocctx.py @@ -166,10 +166,12 @@ def __init__( func_name: str, func_args: List[Any], func_kwargs: Dict[str, Any], - intvl: Union[float, str] = DEFAULT_INTVL + intvl: Union[float, str] = DEFAULT_INTVL, + mod_name: Optional[str] = None ): """Initialize a function context.""" self.label = label + self.mod_name = mod_name or func_name self.func_name = func_name self.func_kwargs = func_kwargs self.func_args = func_args @@ -186,7 +188,9 @@ def __init__( def update_command(self, workflow_run_dir): """Update the function wrap command after changes.""" - self.cmd = ['cylc', 'function-run', self.func_name, + self.cmd = ['cylc', 'function-run', + self.mod_name, + self.func_name, json.dumps(self.func_args), json.dumps(self.func_kwargs), workflow_run_dir] diff --git a/cylc/flow/subprocpool.py b/cylc/flow/subprocpool.py index 29a236b2f9c..864071332b8 100644 --- a/cylc/flow/subprocpool.py +++ b/cylc/flow/subprocpool.py @@ -126,7 +126,7 @@ def get_xtrig_func(mod_name, func_name, src_dir): return _XTRIG_FUNC_CACHE[(mod_name, func_name)] -def run_function(func_name, json_args, json_kwargs, src_dir): +def run_function(mod_name, func_name, json_args, json_kwargs, src_dir): """Run a Python function in the process pool. func_name(*func_args, **func_kwargs) @@ -142,7 +142,7 @@ def run_function(func_name, json_args, json_kwargs, src_dir): func_kwargs = json.loads(json_kwargs) # Find and import then function. - func = get_xtrig_func(func_name, func_name, src_dir) + func = get_xtrig_func(mod_name, func_name, src_dir) # Redirect stdout to stderr. 
orig_stdout = sys.stdout diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 82960a312bf..0f89d2122dd 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -699,7 +699,9 @@ def process_message( completed_output: Optional[bool] = False if msg0 not in [TASK_OUTPUT_SUBMIT_FAILED, TASK_OUTPUT_FAILED]: - completed_output = itask.state.outputs.set_message_complete(msg0) + completed_output = ( + itask.state.outputs.set_message_complete(msg0, forced) + ) if completed_output: self.data_store_mgr.delta_task_output(itask, msg0) @@ -716,8 +718,8 @@ def process_message( if message == self.EVENT_STARTED: if ( - flag == self.FLAG_RECEIVED - and itask.state.is_gt(TASK_STATUS_RUNNING) + flag == self.FLAG_RECEIVED + and itask.state.is_gt(TASK_STATUS_RUNNING) ): # Already running. return True @@ -1280,7 +1282,7 @@ def _retry_task(self, itask, wallclock_time, submit_retry=False): [], kwargs ) - self.xtrigger_mgr.add_trig( + self.xtrigger_mgr.xtriggers.add_trig( label, xtrig, os.getenv("CYLC_WORKFLOW_RUN_DIR") diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 4ac90d28aeb..185966ff12d 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -111,7 +111,7 @@ get_utc_mode ) from cylc.flow.cfgspec.globalcfg import SYSPATH -from cylc.flow.util import serialise +from cylc.flow.util import serialise_set if TYPE_CHECKING: from cylc.flow.task_proxy import TaskProxy @@ -443,7 +443,7 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, # Log and persist LOG.debug(f"[{itask}] host={host}") self.workflow_db_mgr.put_insert_task_jobs(itask, { - 'flow_nums': serialise(itask.flow_nums), + 'flow_nums': serialise_set(itask.flow_nums), 'is_manual_submit': itask.is_manual_submit, 'try_num': itask.get_try_num(), 'time_submit': get_current_time_string(), @@ -1272,7 +1272,7 @@ def _prep_submit_task_job_error(self, workflow, itask, action, exc): self.workflow_db_mgr.put_insert_task_jobs( itask, { - 'flow_nums': 
serialise(itask.flow_nums), + 'flow_nums': serialise_set(itask.flow_nums), 'job_id': itask.summary.get('submit_method_id'), 'is_manual_submit': itask.is_manual_submit, 'try_num': itask.get_try_num(), diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index a9af2d34dcc..39da3750b59 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -67,6 +67,9 @@ TASK_OUTPUT_FINISHED, ) +# DB output message for forced completion +FORCED_COMPLETION_MSG = "(manually completed)" + # this evaluates task completion expressions CompletionEvaluator = restricted_evaluator( # expressions @@ -296,23 +299,25 @@ class TaskOutputs: expression string. """ - __slots__ = ( "_message_to_trigger", "_message_to_compvar", "_completed", "_completion_expression", + "_forced", ) _message_to_trigger: Dict[str, str] # message: trigger _message_to_compvar: Dict[str, str] # message: completion variable _completed: Dict[str, bool] # message: is_complete _completion_expression: str + _forced: List[str] # list of messages of force-completed outputs def __init__(self, tdef: 'Union[TaskDef, str]'): self._message_to_trigger = {} self._message_to_compvar = {} self._completed = {} + self._forced = [] if isinstance(tdef, str): # abnormal use e.g. from the "cylc show" command @@ -341,7 +346,32 @@ def get_trigger(self, message: str) -> str: """Return the trigger associated with this message.""" return self._message_to_trigger[message] - def set_message_complete(self, message: str) -> Optional[bool]: + def set_trigger_complete( + self, trigger: str, forced=False + ) -> Optional[bool]: + """Set the provided output trigger as complete. + + Args: + trigger: + The task output trigger to satisfy. + + Returns: + True: + If the output was unset before. + False: + If the output was already set. + None + If the output does not apply. 
+ + """ + trg_to_msg = { + v: k for k, v in self._message_to_trigger.items() + } + return self.set_message_complete(trg_to_msg[trigger], forced) + + def set_message_complete( + self, message: str, forced=False + ) -> Optional[bool]: """Set the provided task message as complete. Args: @@ -364,6 +394,8 @@ def set_message_complete(self, message: str) -> Optional[bool]: if self._completed[message] is False: # output was incomplete self._completed[message] = True + if forced: + self._forced.append(message) return True # output was already completed @@ -381,16 +413,23 @@ def is_message_complete(self, message: str) -> Optional[bool]: return self._completed[message] return None - def iter_completed_messages(self) -> Iterator[str]: - """A generator that yields completed messages. + def get_completed_outputs(self) -> Dict[str, str]: + """Return a dict {trigger: message} of completed outputs. - Yields: - message: A completed task message. + Replace the message with FORCED_COMPLETION_MSG if the output + was force-completed. """ - for message, is_completed in self._completed.items(): - if is_completed: - yield message + def _get_msg(message): + if message in self._forced: + return FORCED_COMPLETION_MSG + else: + return message + + return { + self._message_to_trigger[message]: _get_msg(message) + for message, is_completed in self._completed.items() + if is_completed + } def __iter__(self) -> Iterator[Tuple[str, str, bool]]: """A generator that yields all outputs. 
diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index d37049f11ae..21893103f86 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -65,8 +65,8 @@ ) from cylc.flow.task_trigger import TaskTrigger from cylc.flow.util import ( - serialise, - deserialise + serialise_set, + deserialise_set ) from cylc.flow.wallclock import get_current_time_string from cylc.flow.platforms import get_platform @@ -123,6 +123,7 @@ def __init__( self.task_events_mgr: 'TaskEventsManager' = task_events_mgr self.task_events_mgr.spawn_func = self.spawn_on_output self.xtrigger_mgr: 'XtriggerManager' = xtrigger_mgr + self.xtrigger_mgr.add_xtriggers(self.config.xtrigger_collator) self.data_store_mgr: 'DataStoreMgr' = data_store_mgr self.flow_mgr: 'FlowMgr' = flow_mgr @@ -208,7 +209,7 @@ def db_add_new_flow_rows(self, itask: TaskProxy) -> None: "time_created": now, "time_updated": now, "status": itask.state.status, - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), "flow_wait": itask.flow_wait, "is_manual_submit": itask.is_manual_submit } @@ -433,7 +434,7 @@ def check_task_output( self, cycle: str, task: str, - output: str, + output_msg: str, flow_nums: 'FlowNums', ) -> Union[str, bool]: """Returns truthy if the specified output is satisfied in the DB.""" @@ -443,10 +444,20 @@ def check_task_output( # loop through matching tasks if flow_nums.intersection(task_flow_nums): # this task is in the right flow - task_outputs = json.loads(task_outputs) + # BACK COMPAT: In Cylc >8.0.0,<8.3.0, only the task + # messages were stored in the DB as a list. 
+ # from: 8.0.0 + # to: 8.3.0 + outputs: Union[ + Dict[str, str], List[str] + ] = json.loads(task_outputs) + messages = ( + outputs.values() if isinstance(outputs, dict) + else outputs + ) return ( 'satisfied from database' - if output in task_outputs + if output_msg in messages else False ) else: @@ -475,7 +486,7 @@ def load_db_task_pool_for_restart(self, row_idx, row): self.tokens, self.config.get_taskdef(name), get_point(cycle), - deserialise(flow_nums), + deserialise_set(flow_nums), status=status, is_held=is_held, submit_num=submit_num, @@ -483,7 +494,7 @@ def load_db_task_pool_for_restart(self, row_idx, row): flow_wait=bool(flow_wait), is_manual_submit=bool(is_manual_submit), sequential_xtrigger_labels=( - self.xtrigger_mgr.sequential_xtrigger_labels + self.xtrigger_mgr.xtriggers.sequential_xtrigger_labels ), ) @@ -538,14 +549,14 @@ def load_db_task_pool_for_restart(self, row_idx, row): # Update prerequisite satisfaction status from DB sat = {} - for prereq_name, prereq_cycle, prereq_output, satisfied in ( + for prereq_name, prereq_cycle, prereq_output_msg, satisfied in ( self.workflow_db_mgr.pri_dao.select_task_prerequisites( cycle, name, flow_nums, ) ): # Prereq satisfaction as recorded in the DB. sat[ - (prereq_cycle, prereq_name, prereq_output) + (prereq_cycle, prereq_name, prereq_output_msg) ] = satisfied if satisfied != '0' else False for itask_prereq in itask.state.prerequisites: @@ -557,12 +568,12 @@ def load_db_task_pool_for_restart(self, row_idx, row): # added to an already-spawned task before restart. 
# Look through task outputs to see if is has been # satisfied - prereq_cycle, prereq_task, prereq_output = key + prereq_cycle, prereq_task, prereq_output_msg = key itask_prereq.satisfied[key] = ( self.check_task_output( prereq_cycle, prereq_task, - prereq_output, + prereq_output_msg, itask.flow_nums, ) ) @@ -735,7 +746,8 @@ def get_or_spawn_task( if ntask is not None: is_xtrig_sequential = ntask.is_xtrigger_sequential elif any( - xtrig_label in self.xtrigger_mgr.sequential_xtrigger_labels + xtrig_label in ( + self.xtrigger_mgr.xtriggers.sequential_xtrigger_labels) for sequence, xtrig_labels in tdef.xtrig_labels.items() for xtrig_label in xtrig_labels if sequence.is_valid(point) @@ -1025,7 +1037,7 @@ def reload_taskdefs(self, config: 'WorkflowConfig') -> None: itask.flow_nums, itask.state.status, sequential_xtrigger_labels=( - self.xtrigger_mgr.sequential_xtrigger_labels + self.xtrigger_mgr.xtriggers.sequential_xtrigger_labels ), ) itask.copy_to_reload_successor( @@ -1350,14 +1362,15 @@ def spawn_on_output(self, itask, output, forced=False): with suppress(KeyError): children = itask.graph_children[output] + if itask.flow_wait and children: + LOG.warning( + f"[{itask}] not spawning on {output}: flow wait requested") + self.remove_if_complete(itask, output) + return + suicide = [] for c_name, c_point, is_abs in children: - if itask.flow_wait: - LOG.warning( - f"[{itask}] not spawning on {output}: flow wait requested") - continue - if is_abs: self.abs_outputs_done.add( (str(itask.point), itask.tdef.name, output)) @@ -1610,7 +1623,7 @@ def _get_task_history( return never_spawned, submit_num, prev_status, prev_flow_wait - def _load_historical_outputs(self, itask): + def _load_historical_outputs(self, itask: 'TaskProxy') -> None: """Load a task's historical outputs from the DB.""" info = self.workflow_db_mgr.pri_dao.select_task_outputs( itask.tdef.name, str(itask.point)) @@ -1620,8 +1633,22 @@ def _load_historical_outputs(self, itask): else: for outputs_str, fnums in 
info.items(): if itask.flow_nums.intersection(fnums): - for msg in json.loads(outputs_str): - itask.state.outputs.set_message_complete(msg) + # BACK COMPAT: In Cylc >8.0.0,<8.3.0, only the task + # messages were stored in the DB as a list. + # from: 8.0.0 + # to: 8.3.0 + outputs: Union[ + Dict[str, str], List[str] + ] = json.loads(outputs_str) + if isinstance(outputs, dict): + # {trigger: message} - match triggers, not messages. + # DB may record forced completion rather than message. + for trigger in outputs.keys(): + itask.state.outputs.set_trigger_complete(trigger) + else: + # [message] - always the full task message + for msg in outputs: + itask.state.outputs.set_message_complete(msg) def spawn_task( self, @@ -1762,22 +1789,14 @@ def _get_task_proxy_db_outputs( transient=transient, is_manual_submit=is_manual_submit, sequential_xtrigger_labels=( - self.xtrigger_mgr.sequential_xtrigger_labels + self.xtrigger_mgr.xtriggers.sequential_xtrigger_labels ), ) if itask is None: return None # Update it with outputs that were already completed. 
- info = self.workflow_db_mgr.pri_dao.select_task_outputs( - itask.tdef.name, str(itask.point)) - if not info: - # (Note still need this if task not run before) - self.db_add_new_flow_rows(itask) - for outputs_str, fnums in info.items(): - if flow_nums.intersection(fnums): - for msg in json.loads(outputs_str): - itask.state.outputs.set_message_complete(msg) + self._load_historical_outputs(itask) return itask def _standardise_prereqs( @@ -2161,7 +2180,7 @@ def force_trigger_tasks( flow_wait=flow_wait, submit_num=submit_num, sequential_xtrigger_labels=( - self.xtrigger_mgr.sequential_xtrigger_labels + self.xtrigger_mgr.xtriggers.sequential_xtrigger_labels ), ) if itask is None: diff --git a/cylc/flow/util.py b/cylc/flow/util.py index 8b8b613787a..74a11d0505a 100644 --- a/cylc/flow/util.py +++ b/cylc/flow/util.py @@ -148,18 +148,27 @@ def cli_format(cmd: List[str]): return ' '.join(cmd) -def serialise(flow_nums: set): - """Convert set to json. +def serialise_set(flow_nums: set) -> str: + """Convert set to json, sorted. + For use when a sorted result is needed for consistency. + Example: - >>> serialise({'3','2'}) + >>> serialise_set({'3','2'}) '["2", "3"]' -""" + + """ return json.dumps(sorted(flow_nums)) -def deserialise(flow_num_str: str): - """Converts string to set.""" +def deserialise_set(flow_num_str: str) -> set: + """Convert json string to set. 
+ + Example: + >>> sorted(deserialise_set('[2, 3]')) + [2, 3] + + """ return set(json.loads(flow_num_str)) diff --git a/cylc/flow/workflow_db_mgr.py b/cylc/flow/workflow_db_mgr.py index 0a92e7312bf..8e57d19292b 100644 --- a/cylc/flow/workflow_db_mgr.py +++ b/cylc/flow/workflow_db_mgr.py @@ -40,7 +40,7 @@ from cylc.flow import __version__ as CYLC_VERSION from cylc.flow.wallclock import get_current_time_string, get_utc_mode from cylc.flow.exceptions import CylcError, ServiceFileError -from cylc.flow.util import serialise +from cylc.flow.util import serialise_set, deserialise_set if TYPE_CHECKING: from pathlib import Path @@ -48,6 +48,7 @@ from cylc.flow.scheduler import Scheduler from cylc.flow.task_pool import TaskPool from cylc.flow.task_events_mgr import EventKey + from cylc.flow.task_proxy import TaskProxy Version = Any # TODO: narrow down Any (should be str | int) after implementing type @@ -429,7 +430,7 @@ def put_update_task_state(self, itask): where_args = { "cycle": str(itask.point), "name": itask.tdef.name, - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), } # Note tasks_states table rows are for latest submit_num only # (not one row per submit). 
@@ -451,7 +452,7 @@ def put_update_task_flow_wait(self, itask): where_args = { "cycle": str(itask.point), "name": itask.tdef.name, - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), } self.db_updates_map.setdefault(self.TABLE_TASK_STATES, []) self.db_updates_map[self.TABLE_TASK_STATES].append( @@ -481,7 +482,7 @@ def put_task_pool(self, pool: 'TaskPool') -> None: prereq.satisfied.items() ): self.put_insert_task_prerequisites(itask, { - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), "prereq_name": p_name, "prereq_cycle": p_cycle, "prereq_output": p_output, @@ -490,7 +491,7 @@ def put_task_pool(self, pool: 'TaskPool') -> None: self.db_inserts_map[self.TABLE_TASK_POOL].append({ "name": itask.tdef.name, "cycle": str(itask.point), - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), "status": itask.state.status, "is_held": itask.state.is_held }) @@ -535,7 +536,7 @@ def put_task_pool(self, pool: 'TaskPool') -> None: where_args = { "cycle": str(itask.point), "name": itask.tdef.name, - "flow_nums": serialise(itask.flow_nums) + "flow_nums": serialise_set(itask.flow_nums) } self.db_updates_map.setdefault(self.TABLE_TASK_STATES, []) self.db_updates_map[self.TABLE_TASK_STATES].append( @@ -585,8 +586,8 @@ def put_insert_task_outputs(self, itask): CylcWorkflowDAO.TABLE_TASK_OUTPUTS, itask, { - "flow_nums": serialise(itask.flow_nums), - "outputs": json.dumps([]) + "flow_nums": serialise_set(itask.flow_nums), + "outputs": json.dumps({}) } ) @@ -628,21 +629,21 @@ def put_update_task_jobs(self, itask, set_args): self._put_update_task_x( CylcWorkflowDAO.TABLE_TASK_JOBS, itask, set_args) - def put_update_task_outputs(self, itask): + def put_update_task_outputs(self, itask: 'TaskProxy') -> None: """Put UPDATE statement for task_outputs table.""" set_args = { "outputs": json.dumps( - list(itask.state.outputs.iter_completed_messages()) + 
itask.state.outputs.get_completed_outputs() ) } where_args = { "cycle": str(itask.point), "name": itask.tdef.name, - "flow_nums": serialise(itask.flow_nums), + "flow_nums": serialise_set(itask.flow_nums), } - self.db_updates_map.setdefault(self.TABLE_TASK_OUTPUTS, []) - self.db_updates_map[self.TABLE_TASK_OUTPUTS].append( - (set_args, where_args)) + self.db_updates_map.setdefault(self.TABLE_TASK_OUTPUTS, []).append( + (set_args, where_args) + ) def _put_update_task_x(self, table_name, itask, set_args): """Put UPDATE statement for a task_* table.""" @@ -652,7 +653,7 @@ def _put_update_task_x(self, table_name, itask, set_args): if "submit_num" not in set_args: where_args["submit_num"] = itask.submit_num if "flow_nums" not in set_args: - where_args["flow_nums"] = serialise(itask.flow_nums) + where_args["flow_nums"] = serialise_set(itask.flow_nums) self.db_updates_map.setdefault(table_name, []) self.db_updates_map[table_name].append((set_args, where_args)) @@ -742,8 +743,7 @@ def upgrade_pre_810(pri_dao: CylcWorkflowDAO) -> None: # We can't upgrade if the flow_nums in task_states are not # distinct. 
- from cylc.flow.util import deserialise - flow_nums = deserialise(conn.execute( + flow_nums = deserialise_set(conn.execute( 'SELECT DISTINCT flow_nums FROM task_states;').fetchall()[0][0]) if len(flow_nums) != 1: raise CylcError( diff --git a/cylc/flow/xtrigger_mgr.py b/cylc/flow/xtrigger_mgr.py index f6595b400a3..d23bc936203 100644 --- a/cylc/flow/xtrigger_mgr.py +++ b/cylc/flow/xtrigger_mgr.py @@ -37,9 +37,14 @@ from cylc.flow.subprocctx import add_kwarg_to_sig from cylc.flow.subprocpool import get_xtrig_func from cylc.flow.xtriggers.wall_clock import _wall_clock +from cylc.flow.xtriggers.workflow_state import ( + workflow_state, + _workflow_state_backcompat, + _upgrade_workflow_state_sig, +) if TYPE_CHECKING: - from inspect import BoundArguments + from inspect import BoundArguments, Signature from cylc.flow.broadcast_mgr import BroadcastMgr from cylc.flow.data_store_mgr import DataStoreMgr from cylc.flow.subprocctx import SubFuncContext @@ -159,6 +164,245 @@ class TemplateVariables(Enum): RE_STR_TMPL = re.compile(r'(? None: + """Add a new xtrigger function. + + Args: + label: xtrigger label + fctx: function context + fdir: module directory + + """ + if label in self.functx_map: + # we've already seen this one + return + + if ( + not label.startswith('_cylc_retry_') and not + label.startswith('_cylc_submit_retry_') + ): + # (the "_wall_clock" function fails "wall_clock" validation) + self._validate(label, fctx, fdir) + + self.functx_map[label] = fctx + + if fctx.func_kwargs.pop( + 'sequential', + self.sequential_xtriggers_default + ): + self.sequential_xtrigger_labels.add(label) + + if fctx.func_name == "wall_clock": + self.wall_clock_labels.add(label) + + @classmethod + def _validate( + cls, + label: str, + fctx: 'SubFuncContext', + fdir: str, + ) -> None: + """Check xtrigger existence, string templates and function signature. + + Also call a specific xtrigger argument validation function, "validate", + if defined in the xtrigger module. 
+ + Args: + label: xtrigger label + fctx: function context + fdir: function directory + + Raises: + XtriggerConfigError: + * If the function module was not found. + * If the function was not found in the xtrigger module. + * If the function is not callable. + * If any string template in the function context + arguments are not present in the expected template values. + * If the arguments do not match the function signature. + + """ + sig_str = fctx.get_signature() + + try: + func = get_xtrig_func(fctx.mod_name, fctx.func_name, fdir) + except (ImportError, AttributeError) as exc: + raise XtriggerConfigError(label, sig_str, exc) + try: + sig = signature(func) + except TypeError as exc: + # not callable + raise XtriggerConfigError(label, sig_str, exc) + + sig = cls._handle_sequential_kwarg(label, fctx, sig) + + # Validate args and kwargs against the function signature + try: + bound_args = sig.bind(*fctx.func_args, **fctx.func_kwargs) + except TypeError as exc: + err = XtriggerConfigError(label, sig_str, exc) + if func is workflow_state: + bound_args = cls._try_workflow_state_backcompat( + label, fctx, err + ) + else: + raise err + + # Specific xtrigger.validate(), if available. + # Note arg string templating has not been done at this point. + cls._try_xtrig_validate_func( + label, fctx, fdir, bound_args, sig_str + ) + + # Check any string templates in the function arg values (note this + # won't catch bad task-specific values - which are added dynamically). + template_vars = set() + for argv in fctx.func_args + list(fctx.func_kwargs.values()): + if not isinstance(argv, str): + # Not a string arg. 
+ continue + + # check template variables are valid + for match in RE_STR_TMPL.findall(argv): + try: + template_vars.add(TemplateVariables(match)) + except ValueError: + raise XtriggerConfigError( + label, sig_str, + f"Illegal template in xtrigger: {match}", + ) + + # check for deprecated template variables + deprecated_variables = template_vars & { + TemplateVariables.WorkflowName, + TemplateVariables.SuiteName, + TemplateVariables.SuiteRunDir, + TemplateVariables.SuiteShareDir, + } + if deprecated_variables: + LOG.warning( + f'Xtrigger "{label}" uses deprecated template variables:' + f' {", ".join(t.value for t in deprecated_variables)}' + ) + + @staticmethod + def _handle_sequential_kwarg( + label: str, fctx: 'SubFuncContext', sig: 'Signature' + ) -> 'Signature': + """Handle reserved 'sequential' kwarg in xtrigger functions.""" + sequential_param = sig.parameters.get('sequential', None) + if sequential_param: + if not isinstance(sequential_param.default, bool): + raise XtriggerConfigError( + label, fctx.func_name, + ( + "xtrigger has a reserved argument" + " 'sequential' with no boolean default" + ) + ) + fctx.func_kwargs.setdefault('sequential', sequential_param.default) + + elif 'sequential' in fctx.func_kwargs: + # xtrig marked as sequential, so add 'sequential' arg to signature + sig = add_kwarg_to_sig( + sig, 'sequential', fctx.func_kwargs['sequential'] + ) + return sig + + @staticmethod + def _try_xtrig_validate_func( + label: str, + fctx: 'SubFuncContext', + fdir: str, + bound_args: 'BoundArguments', + signature_str: str, + ): + """Call an xtrigger's `validate()` function if it is implemented. + + Raise XtriggerConfigError if validation fails. 
+ + """ + vname = "validate" + if fctx.func_name == _workflow_state_backcompat.__name__: + vname = "_validate_backcompat" + + try: + xtrig_validate_func = get_xtrig_func(fctx.mod_name, vname, fdir) + except (AttributeError, ImportError): + return + bound_args.apply_defaults() + try: + xtrig_validate_func(bound_args.arguments) + except Exception as exc: # Note: catch all errors + raise XtriggerConfigError(label, signature_str, exc) + + # BACK COMPAT: workflow_state_backcompat + # from: 8.0.0 + # to: 8.3.0 + # remove at: 8.x + @classmethod + def _try_workflow_state_backcompat( + cls, + label: str, + fctx: 'SubFuncContext', + err: XtriggerConfigError, + ) -> 'BoundArguments': + """Try to validate args against the old workflow_state signature. + + Raise the original signature check error if this signature check fails. + + Returns the bound arguments for the old signature. + """ + sig = cls._handle_sequential_kwarg( + label, fctx, signature(_workflow_state_backcompat) + ) + try: + bound_args = sig.bind(*fctx.func_args, **fctx.func_kwargs) + except TypeError: + # failed signature check for backcompat function + raise err # original signature check error + + old_sig_str = fctx.get_signature() + upg_sig_str = "workflow_state({})".format( + ", ".join( + f'{k}={v}' for k, v in + _upgrade_workflow_state_sig(bound_args.arguments).items() + if v is not None + ) + ) + LOG.warning( + "(8.3.0) Deprecated function signature used for " + "workflow_state xtrigger was automatically upgraded. Please " + "alter your workflow to use the new syntax:\n" + f" {old_sig_str} --> {upg_sig_str}" + ) + fctx.func_name = _workflow_state_backcompat.__name__ + return bound_args + + class XtriggerManager: """Manage clock triggers and xtrigger functions. 
@@ -168,8 +412,8 @@ class XtriggerManager: clock_0 = wall_clock() # offset PT0H clock_1 = wall_clock(offset=PT1H) # or wall_clock(PT1H) - workflow_x = workflow_state(workflow=other, - point=%(task_cycle_point)s):PT30S + workflow_x = workflow_state( + workflow_task_id=other, point=%(task_cycle_point)s):PT30S [[graph]] PT1H = ''' @clock_1 & @workflow_x => foo & bar @@ -209,9 +453,7 @@ class XtriggerManager: # "sequential=False" here overrides workflow and function default. clock_0 = wall_clock(sequential=False) workflow_x = workflow_state( - workflow=other, - point=%(task_cycle_point)s, - ):PT30S + workflow_task_id=other, point=%(task_cycle_point)s):PT30S [[graph]] PT1H = ''' @workflow_x => foo & bar # spawned on workflow_x satisfaction @@ -240,8 +482,6 @@ def __init__( workflow_run_dir: Optional[str] = None, workflow_share_dir: Optional[str] = None, ): - # Workflow function and clock triggers by label. - self.functx_map: 'Dict[str, SubFuncContext]' = {} # When next to call a function, by signature. self.t_next_call: dict = {} # Satisfied triggers and their function results, by signature. @@ -249,13 +489,6 @@ def __init__( # Signatures of active functions (waiting on callback). self.active: list = [] - # Clock labels, to avoid repeated string comparisons - self.wall_clock_labels: Set[str] = set() - - # Workflow wide default, used when not specified in xtrigger kwargs. - self.sequential_xtriggers_default = False - # Labels whose xtriggers are sequentially checked. - self.sequential_xtrigger_labels: Set[str] = set() # Gather parentless tasks whose xtrigger(s) have been satisfied # (these will be used to spawn the next occurrence). 
self.sequential_spawn_next: Set[str] = set() @@ -284,163 +517,17 @@ def __init__( self.broadcast_mgr = broadcast_mgr self.data_store_mgr = data_store_mgr self.do_housekeeping = False + self.xtriggers = XtriggerCollator() - @staticmethod - def check_xtrigger( - label: str, - fctx: 'SubFuncContext', - fdir: str, - ) -> None: - """Generic xtrigger validation: check existence, string templates and - function signature. - - Xtrigger modules may also supply a specific `validate` function - which will be run here. - - Args: - label: xtrigger label - fctx: function context - fdir: function directory - - Raises: - XtriggerConfigError: - * If the function module was not found. - * If the function was not found in the xtrigger module. - * If the function is not callable. - * If any string template in the function context - arguments are not present in the expected template values. - * If the arguments do not match the function signature. - - """ - fname: str = fctx.func_name - - try: - func = get_xtrig_func(fname, fname, fdir) - except ImportError: - raise XtriggerConfigError( - label, f"xtrigger module '{fname}' not found", - ) - except AttributeError: - raise XtriggerConfigError( - label, f"'{fname}' not found in xtrigger module '{fname}'", - ) - - if not callable(func): - raise XtriggerConfigError( - label, f"'{fname}' not callable in xtrigger module '{fname}'", - ) - - sig = signature(func) - sig_str = fctx.get_signature() - - # Handle reserved 'sequential' kwarg: - sequential_param = sig.parameters.get('sequential', None) - if sequential_param: - if not isinstance(sequential_param.default, bool): - raise XtriggerConfigError( - label, - ( - f"xtrigger '{fname}' function definition contains " - "reserved argument 'sequential' that has no " - "boolean default" - ) - ) - fctx.func_kwargs.setdefault('sequential', sequential_param.default) - elif 'sequential' in fctx.func_kwargs: - # xtrig call marked as sequential; add 'sequential' arg to - # signature for validation - sig = 
add_kwarg_to_sig( - sig, 'sequential', fctx.func_kwargs['sequential'] - ) - - # Validate args and kwargs against the function signature - try: - bound_args = sig.bind( - *fctx.func_args, **fctx.func_kwargs - ) - except TypeError as exc: - raise XtriggerConfigError(label, f"{sig_str}: {exc}") - # Specific xtrigger.validate(), if available. - XtriggerManager.try_xtrig_validate_func( - label, fname, fdir, bound_args, sig_str + def add_xtriggers(self, xtriggers: 'XtriggerCollator'): + """Add pre-collated and validated xtriggers.""" + self.xtriggers.update(xtriggers) + self.xtriggers.sequential_xtriggers_default = ( + xtriggers.sequential_xtriggers_default ) - # Check any string templates in the function arg values (note this - # won't catch bad task-specific values - which are added dynamically). - template_vars = set() - for argv in fctx.func_args + list(fctx.func_kwargs.values()): - if not isinstance(argv, str): - # Not a string arg. - continue - - # check template variables are valid - for match in RE_STR_TMPL.findall(argv): - try: - template_vars.add(TemplateVariables(match)) - except ValueError: - raise XtriggerConfigError( - label, f"Illegal template in xtrigger: {match}", - ) - - # check for deprecated template variables - deprecated_variables = template_vars & { - TemplateVariables.WorkflowName, - TemplateVariables.SuiteName, - TemplateVariables.SuiteRunDir, - TemplateVariables.SuiteShareDir, - } - if deprecated_variables: - LOG.warning( - f'Xtrigger "{label}" uses deprecated template variables:' - f' {", ".join(t.value for t in deprecated_variables)}' - ) - - @staticmethod - def try_xtrig_validate_func( - label: str, - fname: str, - fdir: str, - bound_args: 'BoundArguments', - signature_str: str, - ): - """Call an xtrigger's `validate()` function if it is implemented. - - Raise XtriggerConfigError if validation fails. 
- """ - try: - xtrig_validate_func = get_xtrig_func(fname, 'validate', fdir) - except (AttributeError, ImportError): - return - bound_args.apply_defaults() - try: - xtrig_validate_func(bound_args.arguments) - except Exception as exc: # Note: catch all errors - raise XtriggerConfigError( - label, f"{signature_str} validation failed: {exc}" - ) - - def add_trig(self, label: str, fctx: 'SubFuncContext', fdir: str) -> None: - """Add a new xtrigger function. - - Call check_xtrigger before this, during validation. - - Args: - label: xtrigger label - fctx: function context - fdir: function module directory - - """ - self.functx_map[label] = fctx - if fctx.func_kwargs.pop( - 'sequential', - self.sequential_xtriggers_default - ): - self.sequential_xtrigger_labels.add(label) - if fctx.func_name == "wall_clock": - self.wall_clock_labels.add(label) - def mutate_trig(self, label, kwargs): - self.functx_map[label].func_kwargs.update(kwargs) + self.xtriggers.functx_map[label].func_kwargs.update(kwargs) def load_xtrigger_for_restart(self, row_idx: int, row: Tuple[str, str]): """Load satisfied xtrigger results from workflow DB. @@ -502,11 +589,11 @@ def get_xtrig_ctx( TemplateVariables.TaskID.value: str(itask.identity) } farg_templ.update(self.farg_templ) - ctx = deepcopy(self.functx_map[label]) + ctx = deepcopy(self.xtriggers.functx_map[label]) args = [] kwargs = {} - if label in self.wall_clock_labels: + if label in self.xtriggers.wall_clock_labels: if "trigger_time" in ctx.func_kwargs: # noqa: SIM401 (readabilty) # Internal (retry timer): trigger_time already set. kwargs["trigger_time"] = ctx.func_kwargs["trigger_time"] @@ -545,7 +632,7 @@ def call_xtriggers_async(self, itask: 'TaskProxy'): itask: task proxy to check. """ for label, sig, ctx, _ in self._get_xtrigs(itask, unsat_only=True): - if label in self.wall_clock_labels: + if label in self.xtriggers.wall_clock_labels: # Special case: quick synchronous clock check. 
if sig in self.sat_xtrig: # Already satisfied, just update the task @@ -615,7 +702,7 @@ def all_task_seq_xtriggers_satisfied(self, itask: 'TaskProxy') -> bool: return itask.is_xtrigger_sequential and all( itask.state.xtriggers[label] for label in itask.state.xtriggers - if label in self.sequential_xtrigger_labels + if label in self.xtriggers.sequential_xtrigger_labels ) def callback(self, ctx: 'SubFuncContext'): @@ -623,6 +710,9 @@ def callback(self, ctx: 'SubFuncContext'): Record satisfaction status and function results dict. + Log a warning if the xtrigger functions errors, to distinguish + errors from not-satisfied. + Args: ctx (SubFuncContext): function context Raises: @@ -630,15 +720,25 @@ def callback(self, ctx: 'SubFuncContext'): """ sig = ctx.get_signature() self.active.remove(sig) + + if ctx.ret_code != 0: + msg = f"ERROR in xtrigger {sig}" + if ctx.err: + msg += f"\n{ctx.err}" + LOG.warning(msg) + try: satisfied, results = json.loads(ctx.out) except (ValueError, TypeError): return + LOG.debug('%s: returned %s', sig, results) - if satisfied: - # Newly satisfied - self.data_store_mgr.delta_task_xtrigger(sig, True) - self.workflow_db_mgr.put_xtriggers({sig: results}) - LOG.info('xtrigger satisfied: %s = %s', ctx.label, sig) - self.sat_xtrig[sig] = results - self.do_housekeeping = True + if not satisfied: + return + + # Newly satisfied + self.data_store_mgr.delta_task_xtrigger(sig, True) + self.workflow_db_mgr.put_xtriggers({sig: results}) + LOG.info('xtrigger satisfied: %s = %s', ctx.label, sig) + self.sat_xtrig[sig] = results + self.do_housekeeping = True diff --git a/cylc/flow/xtriggers/suite_state.py b/cylc/flow/xtriggers/suite_state.py index 45a8418a832..b2ce3783c32 100644 --- a/cylc/flow/xtriggers/suite_state.py +++ b/cylc/flow/xtriggers/suite_state.py @@ -16,7 +16,7 @@ from cylc.flow import LOG import cylc.flow.flags -from cylc.flow.xtriggers.workflow_state import workflow_state +from cylc.flow.xtriggers.workflow_state import _workflow_state_backcompat 
if not cylc.flow.flags.cylc7_back_compat: LOG.warning( @@ -72,12 +72,6 @@ def suite_state(suite, task, point, offset=None, status='succeeded', to this xtrigger. """ - return workflow_state( - workflow=suite, - task=task, - point=point, - offset=offset, - status=status, - message=message, - cylc_run_dir=cylc_run_dir + return _workflow_state_backcompat( + suite, task, point, offset, status, message, cylc_run_dir ) diff --git a/cylc/flow/xtriggers/workflow_state.py b/cylc/flow/xtriggers/workflow_state.py index 76755085aa6..a2b7d2f1946 100644 --- a/cylc/flow/xtriggers/workflow_state.py +++ b/cylc/flow/xtriggers/workflow_state.py @@ -14,18 +14,133 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import sqlite3 -from typing import Dict, Optional, Tuple +from typing import Dict, Optional, Tuple, Any +import asyncio +from inspect import signature -from metomi.isodatetime.parsers import TimePointParser - -from cylc.flow.cycling.util import add_offset -from cylc.flow.dbstatecheck import CylcWorkflowDBChecker -from cylc.flow.pathutil import get_cylc_run_dir -from cylc.flow.workflow_files import infer_latest_run_from_id +from cylc.flow.scripts.workflow_state import WorkflowPoller +from cylc.flow.id import tokenise +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.task_state import TASK_STATUS_SUCCEEDED def workflow_state( + workflow_task_id: str, + offset: Optional[str] = None, + flow_num: Optional[int] = None, + is_trigger: bool = False, + is_message: bool = False, + alt_cylc_run_dir: Optional[str] = None, +) -> Tuple[bool, Dict[str, Any]]: + """Connect to a workflow DB and check a task status or output. + + If the status or output has been achieved, return {True, result}. + + Arg: + workflow_task_id: + ID (workflow//point/task:selector) of the target task. + offset: + Offset from cycle point as an ISO8601 or integer duration, + e.g. 
PT1H (1 hour) or P1 (1 integer cycle) + flow_num: + Flow number of the target task. + is_message: + Interpret the task:selector as a task output message + (the default is a task status or trigger) + is_trigger: + Interpret the task:selector as a task trigger name + (only needed if it is also a valid status name) + alt_cylc_run_dir: + Alternate cylc-run directory, e.g. for another user. + + Returns: + tuple: (satisfied, result) + + satisfied: + True if ``satisfied`` else ``False``. + result: + Dict of workflow, task, point, offset, + status, message, trigger, flow_num, run_dir + + """ + poller = WorkflowPoller( + workflow_task_id, + offset, + flow_num, + alt_cylc_run_dir, + TASK_STATUS_SUCCEEDED, + is_trigger, is_message, + old_format=False, + condition=workflow_task_id, + max_polls=1, # (for xtriggers the scheduler does the polling) + interval=0, # irrelevant for 1 poll + args=[] + ) + + # NOTE the results dict item names remain compatible with older usage. + + if asyncio.run(poller.poll()): + results = { + 'workflow': poller.workflow_id, + 'task': poller.task, + 'point': poller.cycle, + } + if poller.alt_cylc_run_dir is not None: + results['cylc_run_dir'] = poller.alt_cylc_run_dir + + if offset is not None: + results['offset'] = poller.offset + + if flow_num is not None: + results["flow_num"] = poller.flow_num + + if poller.is_message: + results['message'] = poller.selector + elif poller.is_trigger: + results['trigger'] = poller.selector + else: + results['status'] = poller.selector + + return (True, results) + else: + return (False, {}) + + +def validate(args: Dict[str, Any]): + """Validate workflow_state xtrigger function args. 
+ + Arguments: + workflow_task_id: + full workflow//cycle/task[:selector] + offset: + must be a valid status + flow_num: + must be an integer + alt_cylc_run_dir: + must be a valid path + + """ + tokens = tokenise(args["workflow_task_id"]) + + if any( + tokens[token] is None + for token in ("workflow", "cycle", "task") + ): + raise WorkflowConfigError( + "Full ID needed: workflow//cycle/task[:selector].") + + if ( + args["flow_num"] is not None and + not isinstance(args["flow_num"], int) + ): + raise WorkflowConfigError("flow_num must be an integer if given.") + + +# BACK COMPAT: workflow_state_backcompat +# from: 8.0.0 +# to: 8.3.0 +# remove at: 8.x +def _workflow_state_backcompat( workflow: str, task: str, point: str, @@ -34,10 +149,9 @@ def workflow_state( message: Optional[str] = None, cylc_run_dir: Optional[str] = None ) -> Tuple[bool, Optional[Dict[str, Optional[str]]]]: - """Connect to a workflow DB and query the requested task state. + """Back-compat wrapper for the workflow_state xtrigger. - * Reports satisfied only if the remote workflow state has been achieved. - * Returns all workflow state args to pass on to triggering tasks. + Note Cylc 7 DBs only stored custom task outputs, not standard ones. Arguments: workflow: @@ -58,15 +172,10 @@ def workflow_state( .. note:: This cannot be specified in conjunction with ``status``. + cylc_run_dir: Alternate cylc-run directory, e.g. for another user. - .. note:: - - This only needs to be supplied if the workflow is running in a - different location to what is specified in the global - configuration (usually ``~/cylc-run``). - Returns: tuple: (satisfied, results) @@ -77,32 +186,7 @@ def workflow_state( to this xtrigger. 
""" - workflow = infer_latest_run_from_id(workflow, cylc_run_dir) - cylc_run_dir = get_cylc_run_dir(cylc_run_dir) - - if offset is not None: - point = str(add_offset(point, offset)) - - try: - checker = CylcWorkflowDBChecker(cylc_run_dir, workflow) - except (OSError, sqlite3.Error): - # Failed to connect to DB; target workflow may not be started. - return (False, None) - try: - fmt = checker.get_remote_point_format() - except sqlite3.OperationalError as exc: - try: - fmt = checker.get_remote_point_format_compat() - except sqlite3.OperationalError: - raise exc # original error - if fmt: - my_parser = TimePointParser() - point = str(my_parser.parse(point, dump_format=fmt)) - if message is not None: - satisfied = checker.task_state_met(task, point, message=message) - else: - satisfied = checker.task_state_met(task, point, status=status) - results = { + args = { 'workflow': workflow, 'task': task, 'point': point, @@ -111,4 +195,44 @@ def workflow_state( 'message': message, 'cylc_run_dir': cylc_run_dir } - return satisfied, results + upg_args = _upgrade_workflow_state_sig(args) + satisfied, _results = workflow_state(**upg_args) + + return (satisfied, args) + + +# BACK COMPAT: workflow_state_backcompat +# from: 8.0.0 +# to: 8.3.0 +# remove at: 8.x +def _upgrade_workflow_state_sig(args: Dict[str, Any]) -> Dict[str, Any]: + """Return upgraded args for workflow_state, given the deprecated args.""" + is_message = False + workflow_task_id = f"{args['workflow']}//{args['point']}/{args['task']}" + status = args.get('status') + message = args.get('message') + if status is not None: + workflow_task_id += f":{status}" + elif message is not None: + is_message = True + workflow_task_id += f":{message}" + return { + 'workflow_task_id': workflow_task_id, + 'offset': args.get('offset'), + 'alt_cylc_run_dir': args.get('cylc_run_dir'), + 'is_message': is_message, + } + + +# BACK COMPAT: workflow_state_backcompat +# from: 8.0.0 +# to: 8.3.0 +# remove at: 8.x +def 
_validate_backcompat(args: Dict[str, Any]): + """Validate old workflow_state xtrigger function args. + """ + bound_args = signature(workflow_state).bind( + **_upgrade_workflow_state_sig(args) + ) + bound_args.apply_defaults() + validate(bound_args.arguments) diff --git a/tests/flakyfunctional/cylc-poll/16-execution-time-limit.t b/tests/flakyfunctional/cylc-poll/16-execution-time-limit.t index c437867251d..a01d42e2ab3 100755 --- a/tests/flakyfunctional/cylc-poll/16-execution-time-limit.t +++ b/tests/flakyfunctional/cylc-poll/16-execution-time-limit.t @@ -31,7 +31,7 @@ install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" #------------------------------------------------------------------------------- run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" workflow_run_ok "${TEST_NAME_BASE}-run" \ - cylc play --reference-test -v --no-detach "${WORKFLOW_NAME}" + cylc play --reference-test -v --no-detach "${WORKFLOW_NAME}" --timestamp #------------------------------------------------------------------------------- cmp_times () { # Test if the times $1 and $2 are within $3 seconds of each other. 
diff --git a/tests/flakyfunctional/events/44-timeout.t b/tests/flakyfunctional/events/44-timeout.t index c28557194d8..35da69e3185 100755 --- a/tests/flakyfunctional/events/44-timeout.t +++ b/tests/flakyfunctional/events/44-timeout.t @@ -43,7 +43,7 @@ ${LOG_INDENT}[(('event-handler-00', 'started'), 1) err] killed on timeout (PT10S WARNING - 1/foo/01 handler:event-handler-00 for task event:started failed __END__ -cylc workflow-state "${WORKFLOW_NAME}" >'workflow-state.log' +cylc workflow-state --old-format "${WORKFLOW_NAME}" >'workflow-state.log' contains_ok 'workflow-state.log' << __END__ stopper, 1, succeeded diff --git a/tests/flakyfunctional/xtriggers/01-workflow_state.t b/tests/flakyfunctional/xtriggers/01-workflow_state.t index 63cc34c1161..a809e7e7975 100644 --- a/tests/flakyfunctional/xtriggers/01-workflow_state.t +++ b/tests/flakyfunctional/xtriggers/01-workflow_state.t @@ -45,7 +45,7 @@ WORKFLOW_LOG="$(cylc cat-log -m 'p' "${WORKFLOW_NAME}")" grep_ok 'WARNING - inactivity timer timed out after PT20S' "${WORKFLOW_LOG}" # ... with 2016/foo succeeded and 2016/FAM waiting. -cylc workflow-state -p '2016' "${WORKFLOW_NAME}" >'workflow_state.out' +cylc workflow-state --old-format "${WORKFLOW_NAME}//2016" >'workflow_state.out' contains_ok 'workflow_state.out' << __END__ foo, 2016, succeeded f3, 2016, waiting @@ -56,12 +56,10 @@ __END__ # Check broadcast of xtrigger outputs to dependent tasks. JOB_LOG="$(cylc cat-log -f 'j' -m 'p' "${WORKFLOW_NAME}//2015/f1")" contains_ok "${JOB_LOG}" << __END__ + upstream_workflow="${WORKFLOW_NAME_UPSTREAM}" upstream_task="foo" upstream_point="2015" - upstream_status="succeeded" - upstream_message="data ready" - upstream_offset="None" - upstream_workflow="${WORKFLOW_NAME_UPSTREAM}" + upstream_trigger="data_ready" __END__ # Check broadcast of xtrigger outputs is recorded: 1) in the workflow log... 
@@ -73,14 +71,11 @@ contains_ok "${WORKFLOW_LOG}" << __LOG_BROADCASTS__ ${LOG_INDENT}+ [2015/f1] [environment]upstream_workflow=${WORKFLOW_NAME_UPSTREAM} ${LOG_INDENT}+ [2015/f1] [environment]upstream_task=foo ${LOG_INDENT}+ [2015/f1] [environment]upstream_point=2015 -${LOG_INDENT}+ [2015/f1] [environment]upstream_offset=None -${LOG_INDENT}+ [2015/f1] [environment]upstream_status=succeeded -${LOG_INDENT}+ [2015/f1] [environment]upstream_message=data ready +${LOG_INDENT}+ [2015/f1] [environment]upstream_trigger=data_ready ${LOG_INDENT}- [2015/f1] [environment]upstream_workflow=${WORKFLOW_NAME_UPSTREAM} ${LOG_INDENT}- [2015/f1] [environment]upstream_task=foo ${LOG_INDENT}- [2015/f1] [environment]upstream_point=2015 -${LOG_INDENT}- [2015/f1] [environment]upstream_status=succeeded -${LOG_INDENT}- [2015/f1] [environment]upstream_message=data ready +${LOG_INDENT}- [2015/f1] [environment]upstream_trigger=data_ready __LOG_BROADCASTS__ # ... and 2) in the DB. TEST_NAME="${TEST_NAME_BASE}-check-broadcast-in-db" @@ -93,17 +88,14 @@ sqlite3 "${DB_FILE}" \ 'SELECT change, point, namespace, key, value FROM broadcast_events ORDER BY time, change, point, namespace, key' >"${NAME}" contains_ok "${NAME}" << __DB_BROADCASTS__ -+|2015|f1|[environment]upstream_message|data ready -+|2015|f1|[environment]upstream_offset|None -+|2015|f1|[environment]upstream_point|2015 -+|2015|f1|[environment]upstream_status|succeeded +|2015|f1|[environment]upstream_workflow|${WORKFLOW_NAME_UPSTREAM} +|2015|f1|[environment]upstream_task|foo --|2015|f1|[environment]upstream_message|data ready --|2015|f1|[environment]upstream_point|2015 --|2015|f1|[environment]upstream_status|succeeded ++|2015|f1|[environment]upstream_point|2015 ++|2015|f1|[environment]upstream_trigger|data_ready -|2015|f1|[environment]upstream_workflow|${WORKFLOW_NAME_UPSTREAM} -|2015|f1|[environment]upstream_task|foo +-|2015|f1|[environment]upstream_point|2015 +-|2015|f1|[environment]upstream_trigger|data_ready __DB_BROADCASTS__ purge @@ 
-112,4 +104,3 @@ purge cylc stop --now "${WORKFLOW_NAME_UPSTREAM}" --max-polls=20 --interval=2 \ >'/dev/null' 2>&1 purge "${WORKFLOW_NAME_UPSTREAM}" -exit diff --git a/tests/flakyfunctional/xtriggers/01-workflow_state/flow.cylc b/tests/flakyfunctional/xtriggers/01-workflow_state/flow.cylc index 9502a4bf82f..609b0be3a05 100644 --- a/tests/flakyfunctional/xtriggers/01-workflow_state/flow.cylc +++ b/tests/flakyfunctional/xtriggers/01-workflow_state/flow.cylc @@ -1,15 +1,14 @@ #!Jinja2 [scheduler] - cycle point format = %Y + cycle point format = %Y [[events]] - inactivity timeout = PT20S - abort on inactivity timeout = True + inactivity timeout = PT20S + abort on inactivity timeout = True [scheduling] initial cycle point = 2011 final cycle point = 2016 [[xtriggers]] - upstream = workflow_state(workflow={{UPSTREAM}}, task=foo,\ - point=%(point)s, message='data ready'):PT1S + upstream = workflow_state("{{UPSTREAM}}//%(point)s/foo:data_ready"):PT1S [[graph]] P1Y = """ foo diff --git a/tests/flakyfunctional/xtriggers/01-workflow_state/upstream/flow.cylc b/tests/flakyfunctional/xtriggers/01-workflow_state/upstream/flow.cylc index 5787f0d29ba..19eb3214844 100644 --- a/tests/flakyfunctional/xtriggers/01-workflow_state/upstream/flow.cylc +++ b/tests/flakyfunctional/xtriggers/01-workflow_state/upstream/flow.cylc @@ -4,7 +4,7 @@ initial cycle point = 2010 final cycle point = 2015 [[graph]] - P1Y = "foo:x => bar" + P1Y = "foo:data_ready => bar" [runtime] [[root]] script = true @@ -12,4 +12,4 @@ [[foo]] script = cylc message "data ready" [[[outputs]]] - x = "data ready" + data_ready = "data ready" diff --git a/tests/functional/cylc-cat-log/04-local-tail.t b/tests/functional/cylc-cat-log/04-local-tail.t index 741e8bb58a2..2bed3cf4162 100755 --- a/tests/functional/cylc-cat-log/04-local-tail.t +++ b/tests/functional/cylc-cat-log/04-local-tail.t @@ -29,7 +29,7 @@ create_test_global_config "" " TEST_NAME="${TEST_NAME_BASE}-validate" run_ok "${TEST_NAME}" cylc validate 
"${WORKFLOW_NAME}" workflow_run_ok "${TEST_NAME_BASE}-run" cylc play "${WORKFLOW_NAME}" -cylc workflow-state "${WORKFLOW_NAME}" -t 'foo' -p '1' -S 'start' --interval=1 +cylc workflow-state "${WORKFLOW_NAME}//1/foo:started" --interval=1 sleep 1 TEST_NAME=${TEST_NAME_BASE}-cat-log cylc cat-log "${WORKFLOW_NAME}//1/foo" -f o -m t > "${TEST_NAME}.out" diff --git a/tests/functional/cylc-config/00-simple.t b/tests/functional/cylc-config/00-simple.t index 89c3713628d..00981edc7d3 100755 --- a/tests/functional/cylc-config/00-simple.t +++ b/tests/functional/cylc-config/00-simple.t @@ -52,10 +52,7 @@ cmp_ok "${TEST_NAME}.stderr" - stdout.1 -sort "$TEST_SOURCE_DIR/${TEST_NAME_BASE}/section2.stdout" > stdout.2 -cmp_ok stdout.1 stdout.2 +cmp_ok "${TEST_NAME}.stdout" "$TEST_SOURCE_DIR/${TEST_NAME_BASE}/section2.stdout" cmp_ok "${TEST_NAME}.stderr" - db-bar.2 cmp_ok "db-bar.2" - << __OUT__ -["expired"] +{"expired": "(manually completed)"} __OUT__ purge diff --git a/tests/functional/data-store/00-prune-optional-break.t b/tests/functional/data-store/00-prune-optional-break.t index 39de0225e34..9b09ac8d156 100755 --- a/tests/functional/data-store/00-prune-optional-break.t +++ b/tests/functional/data-store/00-prune-optional-break.t @@ -37,7 +37,7 @@ d => e script = false [[d]] script = """ -cylc workflow-state \$CYLC_WORKFLOW_ID --task=b --point=1 --status=failed --interval=2 +cylc workflow-state \${CYLC_WORKFLOW_ID}//1/b:failed --interval=2 cylc pause \$CYLC_WORKFLOW_ID """ __FLOW__ @@ -45,7 +45,7 @@ __FLOW__ # run workflow run_ok "${TEST_NAME_BASE}-run" cylc play "${WORKFLOW_NAME}" -cylc workflow-state "${WORKFLOW_NAME}" --task=d --point=1 --status=succeeded --interval=2 --max-polls=60 +cylc workflow-state "${WORKFLOW_NAME}/1/d:succeeded" --interval=2 --max-polls=60 # query workflow TEST_NAME="${TEST_NAME_BASE}-prune-optional-break" diff --git a/tests/functional/deprecations/01-cylc8-basic/validation.stderr b/tests/functional/deprecations/01-cylc8-basic/validation.stderr index 
288df3f98e2..034031f3a71 100644 --- a/tests/functional/deprecations/01-cylc8-basic/validation.stderr +++ b/tests/functional/deprecations/01-cylc8-basic/validation.stderr @@ -1,4 +1,5 @@ -WARNING - deprecated items were automatically upgraded in "workflow definition" +WARNING - Obsolete config items were automatically deleted. Please check your workflow and remove them permanently. +WARNING - Deprecated config items were automatically upgraded. Please alter your workflow to use the new syntax. WARNING - * (8.0.0) [cylc]force run mode - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc][authentication] - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc]log resolved dependencies - DELETED (OBSOLETE) @@ -13,6 +14,7 @@ WARNING - * (8.0.0) [cylc][reference test] - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc][simulation]disable suite event handlers - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc][simulation] - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc]task event mail interval -> [cylc][mail]task event batch interval - value unchanged +WARNING - * (8.0.0) [runtime][foo, cat, dog][suite state polling] -> [runtime][foo, cat, dog][workflow state polling] - value unchanged WARNING - * (8.0.0) [cylc][parameters] -> [task parameters] - value unchanged WARNING - * (8.0.0) [cylc][parameter templates] -> [task parameters][templates] - value unchanged WARNING - * (8.0.0) [cylc][events]mail to -> [cylc][mail]to - value unchanged @@ -24,7 +26,6 @@ WARNING - * (8.0.0) [cylc][events]mail smtp - DELETED (OBSOLETE) - use "global. 
WARNING - * (8.0.0) [runtime][foo, cat, dog][events]mail smtp - DELETED (OBSOLETE) - use "global.cylc[scheduler][mail]smtp" instead WARNING - * (8.0.0) [scheduling]max active cycle points -> [scheduling]runahead limit - "2" -> "P1" WARNING - * (8.0.0) [scheduling]hold after point -> [scheduling]hold after cycle point - value unchanged -WARNING - * (8.0.0) [runtime][foo, cat, dog][suite state polling] -> [runtime][foo, cat, dog][workflow state polling] - value unchanged WARNING - * (8.0.0) [runtime][foo, cat, dog][job]execution polling intervals -> [runtime][foo, cat, dog]execution polling intervals - value unchanged WARNING - * (8.0.0) [runtime][foo, cat, dog][job]execution retry delays -> [runtime][foo, cat, dog]execution retry delays - value unchanged WARNING - * (8.0.0) [runtime][foo, cat, dog][job]execution time limit -> [runtime][foo, cat, dog]execution time limit - value unchanged @@ -47,6 +48,6 @@ WARNING - * (8.0.0) [cylc][events]abort if timeout handler fails - DELETED (OBS WARNING - * (8.0.0) [cylc][events]abort if inactivity handler fails - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc][events]abort if stalled handler fails - DELETED (OBSOLETE) WARNING - * (8.0.0) [cylc] -> [scheduler] - value unchanged -WARNING - deprecated graph items were automatically upgraded in "workflow definition": +WARNING - graph items were automatically upgraded in "workflow definition": * (8.0.0) [scheduling][dependencies][X]graph -> [scheduling][graph]X - for X in: P1D diff --git a/tests/functional/flow-triggers/11-wait-merge.t b/tests/functional/flow-triggers/11-wait-merge.t index cb3218ae463..d869cc19600 100644 --- a/tests/functional/flow-triggers/11-wait-merge.t +++ b/tests/functional/flow-triggers/11-wait-merge.t @@ -30,14 +30,14 @@ QUERY="SELECT cycle, name,flow_nums,outputs FROM task_outputs;" run_ok "${TEST_NAME}" sqlite3 "${DB}" "$QUERY" cmp_ok "${TEST_NAME}.stdout" <<\__END__ -1|a|[1]|["submitted", "started", "succeeded"] -1|b|[1]|["submitted", "started", 
"succeeded"] -1|a|[2]|["submitted", "started", "succeeded"] -1|c|[2]|["submitted", "started", "x"] -1|c|[1, 2]|["submitted", "started", "succeeded", "x"] -1|x|[1, 2]|["submitted", "started", "succeeded"] -1|d|[1, 2]|["submitted", "started", "succeeded"] -1|b|[2]|["submitted", "started", "succeeded"] +1|a|[1]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} +1|b|[1]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} +1|a|[2]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} +1|c|[2]|{"submitted": "submitted", "started": "started", "x": "x"} +1|c|[1, 2]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded", "x": "x"} +1|x|[1, 2]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} +1|d|[1, 2]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} +1|b|[2]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded"} __END__ purge diff --git a/tests/functional/job-submission/16-timeout.t b/tests/functional/job-submission/16-timeout.t index b6042b56dad..10410cb7cb7 100755 --- a/tests/functional/job-submission/16-timeout.t +++ b/tests/functional/job-submission/16-timeout.t @@ -51,7 +51,7 @@ ERROR - [jobs-submit cmd] cylc jobs-submit --debug ${DEFAULT_PATHS} -- '${JOB_LO [jobs-submit err] killed on timeout (PT10S) __END__ -cylc workflow-state "${WORKFLOW_NAME}" > workflow-state.log +cylc workflow-state --old-format "${WORKFLOW_NAME}" > workflow-state.log # make sure foo submit failed and the stopper ran contains_ok workflow-state.log << __END__ diff --git a/tests/functional/logging/04-dev_mode.t b/tests/functional/logging/04-dev_mode.t index 45b59720875..2ff83ddb4aa 100644 --- a/tests/functional/logging/04-dev_mode.t +++ b/tests/functional/logging/04-dev_mode.t @@ -35,12 +35,12 @@ run_ok "${TEST_NAME_BASE}-validate-plain" \ cylc validate "${WORKFLOW_NAME}" run_ok "${TEST_NAME_BASE}-validate-vvv" \ - cylc validate -vvv 
"${WORKFLOW_NAME}" + cylc validate --timestamp -vvv "${WORKFLOW_NAME}" grep_ok " DEBUG - \[config:.*\]" "${TEST_NAME_BASE}-validate-vvv.stderr" run_ok "${TEST_NAME_BASE}-validate-vvv--no-timestamp" \ - cylc validate -vvv --no-timestamp "${WORKFLOW_NAME}" + cylc validate -vvv "${WORKFLOW_NAME}" grep_ok "^DEBUG - \[config:.*\]" "${TEST_NAME_BASE}-validate-vvv--no-timestamp.stderr" purge diff --git a/tests/functional/optional-outputs/08-finish-fail-c7-c8.t b/tests/functional/optional-outputs/08-finish-fail-c7-c8.t index e9cfbf831b1..0200dfc5e35 100644 --- a/tests/functional/optional-outputs/08-finish-fail-c7-c8.t +++ b/tests/functional/optional-outputs/08-finish-fail-c7-c8.t @@ -32,7 +32,7 @@ mv "${WORKFLOW_RUN_DIR}/suite.rc" "${WORKFLOW_RUN_DIR}/flow.cylc" TEST_NAME="${TEST_NAME_BASE}-validate_as_c8" run_ok "${TEST_NAME}" cylc validate "${WORKFLOW_NAME}" -DEPR_MSG="deprecated graph items were automatically upgraded" # (not back-compat) +DEPR_MSG="graph items were automatically upgraded" # (not back-compat) grep_ok "${DEPR_MSG}" "${TEST_NAME}.stderr" # No stall expected. 
diff --git a/tests/functional/queues/02-queueorder.t b/tests/functional/queues/02-queueorder.t index ba269eb0cfc..c2babbfc784 100644 --- a/tests/functional/queues/02-queueorder.t +++ b/tests/functional/queues/02-queueorder.t @@ -22,7 +22,7 @@ set_test_number 3 install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" run_ok "${TEST_NAME_BASE}-run" \ - cylc play "${WORKFLOW_NAME}" --reference-test --debug --no-detach + cylc play "${WORKFLOW_NAME}" --reference-test --debug --no-detach --timestamp run_ok "${TEST_NAME_BASE}-test" bash -o pipefail -c " cylc cat-log '${WORKFLOW_NAME}' | grep 'proc_n.*submitted at' | diff --git a/tests/functional/queues/qsize/flow.cylc b/tests/functional/queues/qsize/flow.cylc index 4cfce86d0a5..e8f65d6816f 100644 --- a/tests/functional/queues/qsize/flow.cylc +++ b/tests/functional/queues/qsize/flow.cylc @@ -13,11 +13,11 @@ inherit = FAM [[monitor]] script = """ - N_SUCCEDED=0 - while ((N_SUCCEDED < 12)); do + N_SUCCEEDED=0 + while ((N_SUCCEEDED < 12)); do sleep 1 - N_RUNNING=$(cylc workflow-state $CYLC_WORKFLOW_ID -S running | wc -l) + N_RUNNING=$(cylc dump -t $CYLC_WORKFLOW_ID | grep running | wc -l) ((N_RUNNING <= {{q_size}})) # check - N_SUCCEDED=$(cylc workflow-state $CYLC_WORKFLOW_ID -S succeeded | wc -l) + N_SUCCEEDED=$(cylc workflow-state "${CYLC_WORKFLOW_ID}//*/*:succeeded" | wc -l) done """ diff --git a/tests/functional/reload/03-queues/flow.cylc b/tests/functional/reload/03-queues/flow.cylc index 21af35196c9..8520a61e15a 100644 --- a/tests/functional/reload/03-queues/flow.cylc +++ b/tests/functional/reload/03-queues/flow.cylc @@ -28,13 +28,15 @@ cylc__job__poll_grep_workflow_log 'Reload completed' script = """ cylc__job__wait_cylc_message_started while true; do - RUNNING=$(cylc workflow-state $CYLC_WORKFLOW_ID -S running | wc -l) + RUNNING=$(cylc dump -t "${CYLC_WORKFLOW_ID}" | grep running | wc -l) # Should be max of: monitor plus 3 members of q1 + echo 
"RUNNING $RUNNING" if ((RUNNING > 4)); then break fi sleep 1 - SUCCEEDED=$(cylc workflow-state $CYLC_WORKFLOW_ID -S succeeded | wc -l) + SUCCEEDED=$(cylc workflow-state "${CYLC_WORKFLOW_ID}//*/*:succeeded" --max-polls=1 | wc -l) + echo "SUCCEEDED $SUCCEEDED" if ((SUCCEEDED==13)); then break fi diff --git a/tests/functional/reload/22-remove-task-cycling.t b/tests/functional/reload/22-remove-task-cycling.t index 9936857ac2f..2db87601d77 100644 --- a/tests/functional/reload/22-remove-task-cycling.t +++ b/tests/functional/reload/22-remove-task-cycling.t @@ -73,7 +73,7 @@ TEST_NAME="${TEST_NAME_BASE}-run" workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach "${WORKFLOW_NAME}" TEST_NAME="${TEST_NAME_BASE}-result" -cylc workflow-state "${WORKFLOW_NAME}" > workflow-state.log +cylc workflow-state --old-format "${WORKFLOW_NAME}" > workflow-state.log contains_ok workflow-state.log << __END__ foo, 1, succeeded bar, 1, succeeded diff --git a/tests/functional/restart/30-outputs.t b/tests/functional/restart/30-outputs.t index 997b30031d4..fbf4d887cc1 100755 --- a/tests/functional/restart/30-outputs.t +++ b/tests/functional/restart/30-outputs.t @@ -28,7 +28,7 @@ run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" workflow_run_ok "${TEST_NAME_BASE}-run" cylc play --no-detach "${WORKFLOW_NAME}" sqlite3 "${WORKFLOW_RUN_DIR}/log/db" \ 'SELECT outputs FROM task_outputs WHERE name IS "t1"' >'sqlite3.out' -cmp_json 'sqlite3.out' 'sqlite3.out' <<<'["submitted", "started", "succeeded", "hello"]' +cmp_json 'sqlite3.out' 'sqlite3.out' <<<'{"submitted": "submitted", "started": "started", "succeeded": "succeeded", "hello": "hi there"}' sqlite3 "${WORKFLOW_RUN_DIR}/log/db" 'SELECT * FROM task_pool' >'task-pool.out' cmp_ok 'task-pool.out' <'/dev/null' diff --git a/tests/functional/restart/30-outputs/flow.cylc b/tests/functional/restart/30-outputs/flow.cylc index 7a2168dac6a..51d7dc90c66 100644 --- a/tests/functional/restart/30-outputs/flow.cylc +++ 
b/tests/functional/restart/30-outputs/flow.cylc @@ -13,9 +13,9 @@ t1:greet? => t3 """ [runtime] [[t1]] - script = cylc message 'hello' + script = cylc message -- 'hi there' [[[outputs]]] - hello = hello + hello = "hi there" greet = greeting [[t2, t3]] script = true diff --git a/tests/functional/restart/34-auto-restart-basic.t b/tests/functional/restart/34-auto-restart-basic.t index 62b6677fc7e..63e6f2f2c94 100644 --- a/tests/functional/restart/34-auto-restart-basic.t +++ b/tests/functional/restart/34-auto-restart-basic.t @@ -51,10 +51,8 @@ __FLOW_CONFIG__ # run workflow on localhost normally create_test_global_config '' "${BASE_GLOBAL_CONFIG}" -run_ok "${TEST_NAME}-workflow-start" \ - cylc play "${WORKFLOW_NAME}" --host=localhost -s 'FOO="foo"' -v -cylc workflow-state "${WORKFLOW_NAME}" --task='task_foo01' \ - --status='succeeded' --point=1 --interval=1 --max-polls=20 >& $ERR +run_ok "${TEST_NAME}-workflow-start" cylc play "${WORKFLOW_NAME}" --host=localhost -s 'FOO="foo"' -v +cylc workflow-state "${WORKFLOW_NAME}//1/task_foo01:succeeded" --interval=1 --max-polls=20 >& $ERR # condemn localhost create_test_global_config '' " @@ -70,7 +68,7 @@ log_scan "${TEST_NAME}-shutdown-log-scan" "${FILE}" 20 1 \ 'Workflow shutting down - REQUEST(NOW-NOW)' \ "Attempting to restart on \"${CYLC_TEST_HOST}\"" \ "Workflow now running on \"${CYLC_TEST_HOST}\"" -LATEST_TASK=$(cylc workflow-state "${WORKFLOW_NAME}" -S succeeded \ +LATEST_TASK=$(cylc workflow-state --old-format "${WORKFLOW_NAME}//*/*:succeeded" \ | cut -d ',' -f 1 | sort | tail -n 1 | sed 's/task_foo//') # test restart procedure - scan the second log file created on restart @@ -78,9 +76,9 @@ poll_workflow_restart FILE=$(cylc cat-log "${WORKFLOW_NAME}" -m p |xargs readlink -f) log_scan "${TEST_NAME}-restart-log-scan" "${FILE}" 20 1 \ "Scheduler: url=tcp://$(get_fqdn "${CYLC_TEST_HOST}")" -run_ok "${TEST_NAME}-restart-success" cylc workflow-state "${WORKFLOW_NAME}" \ - --task="$(printf 'task_foo%02d' $(( LATEST_TASK + 3 
)))" \ - --status='succeeded' --point=1 --interval=1 --max-polls=60 +run_ok "${TEST_NAME}-restart-success" \ + cylc workflow-state "${WORKFLOW_NAME}//1/$(printf 'task_foo%02d' $(( LATEST_TASK + 3 ))):succeeded" \ + --interval=1 --max-polls=60 # check the command the workflow has been restarted with run_ok "${TEST_NAME}-contact" cylc get-contact "${WORKFLOW_NAME}" diff --git a/tests/functional/restart/38-auto-restart-stopping.t b/tests/functional/restart/38-auto-restart-stopping.t index 6e6ebf2020e..92a3c9c0677 100644 --- a/tests/functional/restart/38-auto-restart-stopping.t +++ b/tests/functional/restart/38-auto-restart-stopping.t @@ -52,8 +52,7 @@ ${BASE_GLOBAL_CONFIG} " run_ok "${TEST_NAME}-workflow-start" cylc play "${WORKFLOW_NAME}" --host=localhost -cylc workflow-state "${WORKFLOW_NAME}" --task='foo' --status='running' --point=1 \ - --interval=1 --max-polls=20 >& $ERR +cylc workflow-state "${WORKFLOW_NAME}//1/foo:running" --interval=1 --max-polls=20 >& $ERR # condemn localhost create_test_global_config '' " diff --git a/tests/functional/restart/41-auto-restart-local-jobs.t b/tests/functional/restart/41-auto-restart-local-jobs.t index 0ee771a982d..bfd8161ef3f 100644 --- a/tests/functional/restart/41-auto-restart-local-jobs.t +++ b/tests/functional/restart/41-auto-restart-local-jobs.t @@ -62,8 +62,7 @@ cylc play "${WORKFLOW_NAME}" # ensure the workflow WAITS for local jobs to complete before restarting TEST_NAME="${TEST_NAME_BASE}-normal-mode" -cylc workflow-state "${WORKFLOW_NAME}" --task='foo' --status='running' --point=1 \ - --interval=1 --max-polls=20 >& $ERR +cylc workflow-state "${WORKFLOW_NAME}//1/foo:running" --interval=1 --max-polls=20 >& $ERR create_test_global_config '' " ${BASE_GLOBAL_CONFIG} @@ -93,7 +92,7 @@ log_scan "${TEST_NAME}-restart-log-scan" "$LOG_FILE" 20 1 \ TEST_NAME="${TEST_NAME_BASE}-force-mode" cylc trigger "${WORKFLOW_NAME}//1/bar" -cylc workflow-state "${WORKFLOW_NAME}" --task='bar' --status='running' --point=1 \ +cylc workflow-state 
"${WORKFLOW_NAME}//1/bar:running" --point=1 \ --interval=1 --max-polls=20 >& $ERR create_test_global_config '' " diff --git a/tests/functional/workflow-state/00-polling.t b/tests/functional/workflow-state/00-polling.t index f073f6e3c02..cafea36cd9e 100644 --- a/tests/functional/workflow-state/00-polling.t +++ b/tests/functional/workflow-state/00-polling.t @@ -20,7 +20,7 @@ . "$(dirname "$0")/test_header" #------------------------------------------------------------------------------- -set_test_number 5 +set_test_number 7 #------------------------------------------------------------------------------- install_workflow "${TEST_NAME_BASE}" 'polling' #------------------------------------------------------------------------------- @@ -34,6 +34,15 @@ cylc install "${TEST_DIR}/upstream" --workflow-name="${UPSTREAM}" --no-run-name TEST_NAME="${TEST_NAME_BASE}-validate-upstream" run_ok "${TEST_NAME}" cylc validate --debug "${UPSTREAM}" +TEST_NAME=${TEST_NAME_BASE}-validate-polling-y +run_fail "${TEST_NAME}" \ + cylc validate --set="UPSTREAM='${UPSTREAM}'" --set="OUTPUT=':y'" "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stderr" <<__ERR__ +WorkflowConfigError: Polling task "l-mess" must configure a target status or output message in \ +the graph (:y) or task definition (message = "the quick brown fox") but not both. 
+__ERR__ + TEST_NAME=${TEST_NAME_BASE}-validate-polling run_ok "${TEST_NAME}" \ cylc validate --debug --set="UPSTREAM='${UPSTREAM}'" "${WORKFLOW_NAME}" @@ -48,8 +57,8 @@ cylc config -d \ --set="UPSTREAM='${UPSTREAM}'" -i '[runtime][lbad]script' "${WORKFLOW_NAME}" \ >'lbad.script' cmp_ok 'lbad.script' << __END__ -echo cylc workflow-state --task=bad --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=failed ${UPSTREAM} -cylc workflow-state --task=bad --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=failed ${UPSTREAM} +echo cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/bad:failed --interval=2 --max-polls=20 +cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/bad:failed --interval=2 --max-polls=20 __END__ # check auto-generated task script for l-good @@ -57,8 +66,8 @@ cylc config -d \ --set="UPSTREAM='${UPSTREAM}'" -i '[runtime][l-good]script' "${WORKFLOW_NAME}" \ >'l-good.script' cmp_ok 'l-good.script' << __END__ -echo cylc workflow-state --task=good-stuff --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=succeeded ${UPSTREAM} -cylc workflow-state --task=good-stuff --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=succeeded ${UPSTREAM} +echo cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/good-stuff:succeeded --interval=2 --max-polls=20 +cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/good-stuff:succeeded --interval=2 --max-polls=20 __END__ #------------------------------------------------------------------------------- diff --git a/tests/functional/workflow-state/01-polling.t b/tests/functional/workflow-state/01-polling.t index b4694c35a6c..099df9e3a9b 100644 --- a/tests/functional/workflow-state/01-polling.t +++ b/tests/functional/workflow-state/01-polling.t @@ -44,8 +44,8 @@ cylc config -d \ --set="UPSTREAM='${UPSTREAM}'" \ -i '[runtime][lbad]script' "${WORKFLOW_NAME}" >'lbad.script' cmp_ok 'lbad.script' << __END__ -echo cylc workflow-state --task=bad 
--point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=failed ${UPSTREAM} -cylc workflow-state --task=bad --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=failed ${UPSTREAM} +echo cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/bad:failed --interval=2 --max-polls=20 +cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/bad:failed --interval=2 --max-polls=20 __END__ # check auto-generated task script for l-good @@ -53,8 +53,8 @@ cylc config -d \ --set="UPSTREAM='${UPSTREAM}'" \ -i '[runtime][l-good]script' "${WORKFLOW_NAME}" >'l-good.script' cmp_ok 'l-good.script' << __END__ -echo cylc workflow-state --task=good-stuff --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=succeeded ${UPSTREAM} -cylc workflow-state --task=good-stuff --point=\$CYLC_TASK_CYCLE_POINT --interval=2 --max-polls=20 --status=succeeded ${UPSTREAM} +echo cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/good-stuff:succeeded --interval=2 --max-polls=20 +cylc workflow-state ${UPSTREAM}//\$CYLC_TASK_CYCLE_POINT/good-stuff:succeeded --interval=2 --max-polls=20 __END__ #------------------------------------------------------------------------------- diff --git a/tests/functional/workflow-state/05-message.t b/tests/functional/workflow-state/05-message.t deleted file mode 100755 index 89acd6d83e3..00000000000 --- a/tests/functional/workflow-state/05-message.t +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash -# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. -# Copyright (C) NIWA & British Crown (Met Office) & Contributors. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -#------------------------------------------------------------------------------- -# Test cylc workflow-state "template" option -. "$(dirname "$0")/test_header" -#------------------------------------------------------------------------------- -set_test_number 2 -#------------------------------------------------------------------------------- -install_workflow "${TEST_NAME_BASE}" message -#------------------------------------------------------------------------------- -TEST_NAME="${TEST_NAME_BASE}-run" -workflow_run_ok "${TEST_NAME}" cylc play --reference-test --debug --no-detach "${WORKFLOW_NAME}" -#------------------------------------------------------------------------------- -TEST_NAME=${TEST_NAME_BASE}-cli-template -run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}" -p 20100101T0000Z \ - --message=hello --task=t1 --max-polls=1 -#------------------------------------------------------------------------------- -purge -#------------------------------------------------------------------------------- -exit 0 diff --git a/tests/functional/workflow-state/05-output.t b/tests/functional/workflow-state/05-output.t new file mode 100755 index 00000000000..d95bc179518 --- /dev/null +++ b/tests/functional/workflow-state/05-output.t @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# Test cylc workflow-state for outputs (as opposed to statuses) +. "$(dirname "$0")/test_header" + +set_test_number 2 + +install_workflow "${TEST_NAME_BASE}" output + +TEST_NAME="${TEST_NAME_BASE}-run" +workflow_run_ok "${TEST_NAME}" \ + cylc play --reference-test --debug --no-detach "${WORKFLOW_NAME}" + +TEST_NAME=${TEST_NAME_BASE}-cli-check +run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}//20100101T0000Z/t1:out1" --max-polls=1 + +purge diff --git a/tests/functional/workflow-state/06-format.t b/tests/functional/workflow-state/06-format.t index f43460cfbb4..3994822438f 100755 --- a/tests/functional/workflow-state/06-format.t +++ b/tests/functional/workflow-state/06-format.t @@ -14,13 +14,13 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . -#------------------------------------------------------------------------------- + # Test "cylc workflow-state" cycle point format conversion, when the target workflow # sets an explicit cycle point format, and the CLI does not. . 
"$(dirname "$0")/test_header" -#------------------------------------------------------------------------------- + set_test_number 5 -#------------------------------------------------------------------------------- + init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [scheduler] UTC mode = True @@ -33,23 +33,21 @@ init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [[foo]] script = true __FLOW_CONFIG__ -#------------------------------------------------------------------------------- + TEST_NAME="${TEST_NAME_BASE}-run" workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach "${WORKFLOW_NAME}" -#------------------------------------------------------------------------------- + TEST_NAME=${TEST_NAME_BASE}-cli-poll -run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}" -p 20100101T0000Z \ - --task=foo --status=succeeded +run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}//20100101T0000Z/foo:succeeded" --max-polls=1 contains_ok "${TEST_NAME}.stdout" <<__OUT__ -polling for 'succeeded': satisfied +2010-01-01/foo:succeeded __OUT__ -#------------------------------------------------------------------------------- + TEST_NAME=${TEST_NAME_BASE}-cli-dump -run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}" -p 20100101T0000Z +run_ok "${TEST_NAME}" cylc workflow-state --old-format "${WORKFLOW_NAME}//20100101T0000Z" --max-polls=1 contains_ok "${TEST_NAME}.stdout" <<__OUT__ foo, 2010-01-01, succeeded __OUT__ -#------------------------------------------------------------------------------- + purge -#------------------------------------------------------------------------------- -exit 0 + diff --git a/tests/functional/workflow-state/06a-noformat.t b/tests/functional/workflow-state/06a-noformat.t index 6bf800e27f2..eed64f6addd 100755 --- a/tests/functional/workflow-state/06a-noformat.t +++ b/tests/functional/workflow-state/06a-noformat.t @@ -14,14 +14,13 @@ # # You should have received a copy of the GNU General Public License # along with this program. 
If not, see . -#------------------------------------------------------------------------------- + # Test "cylc workflow-state" cycle point format conversion, when the target workflow # sets no explicit cycle point format, and the CLI does (the reverse of 06.t). - . "$(dirname "$0")/test_header" -#------------------------------------------------------------------------------- -set_test_number 5 -#------------------------------------------------------------------------------- + +set_test_number 3 + init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [scheduler] UTC mode = True @@ -34,23 +33,15 @@ init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [[foo]] script = true __FLOW_CONFIG__ -#------------------------------------------------------------------------------- + TEST_NAME="${TEST_NAME_BASE}-run" workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach "${WORKFLOW_NAME}" -#------------------------------------------------------------------------------- + TEST_NAME=${TEST_NAME_BASE}-cli-poll -run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}" -p 2010-01-01T00:00Z \ - --task=foo --status=succeeded -contains_ok "${TEST_NAME}.stdout" <<__OUT__ -polling for 'succeeded': satisfied -__OUT__ -#------------------------------------------------------------------------------- -TEST_NAME=${TEST_NAME_BASE}-cli-dump -run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}" -p 2010-01-01T00:00Z +run_ok "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}//2010-01-01T00+00" contains_ok "${TEST_NAME}.stdout" <<__OUT__ -foo, 20100101T0000Z, succeeded +20100101T0000Z/foo:succeeded __OUT__ -#------------------------------------------------------------------------------- + purge -#------------------------------------------------------------------------------- -exit 0 + diff --git a/tests/functional/workflow-state/07-message2.t b/tests/functional/workflow-state/07-message2.t index cebdeecb13f..ef666714220 100755 --- a/tests/functional/workflow-state/07-message2.t 
+++ b/tests/functional/workflow-state/07-message2.t @@ -15,7 +15,8 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -# Test workflow-state message query on a waiting task - GitHub #2440. +# Originally (Cylc 7): test workflow-state query on a waiting task - GitHub #2440. +# Now (Cylc 8): test result of a failed workflow-state query. . "$(dirname "$0")/test_header" set_test_number 4 @@ -24,12 +25,12 @@ install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" run_ok "${TEST_NAME_BASE}-val" cylc validate "${WORKFLOW_NAME}" -workflow_run_ok "${TEST_NAME_BASE}-run" cylc play --debug --no-detach "${WORKFLOW_NAME}" +workflow_run_ok "${TEST_NAME_BASE}-run" \ + cylc play --debug --no-detach "${WORKFLOW_NAME}" TEST_NAME=${TEST_NAME_BASE}-query -run_fail "${TEST_NAME}" cylc workflow-state \ - "${WORKFLOW_NAME}" -p 2013 -t foo --max-polls=1 -m "the quick brown fox" +run_fail "${TEST_NAME}" cylc workflow-state "${WORKFLOW_NAME}//2013/foo:x" --max-polls=1 -grep_ok "ERROR: condition not satisfied" "${TEST_NAME}.stderr" +grep_ok "failed after 1 polls" "${TEST_NAME}.stderr" purge diff --git a/tests/functional/workflow-state/08-integer.t b/tests/functional/workflow-state/08-integer.t new file mode 100755 index 00000000000..ff95c81a27b --- /dev/null +++ b/tests/functional/workflow-state/08-integer.t @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +. "$(dirname "$0")/test_header" + +set_test_number 15 + +install_workflow "${TEST_NAME_BASE}" integer + +# run one cycle +TEST_NAME="${TEST_NAME_BASE}_run_1" +workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach --stopcp=1 "${WORKFLOW_NAME}" + +# too many args +TEST_NAME="${TEST_NAME_BASE}_cl_error" +run_fail "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}-a" "${WORKFLOW_NAME}-b" + +TEST_NAME="${TEST_NAME_BASE}_check_1_status" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2/foo:waiting +1/foo:succeeded +1/bar:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_check_1_outputs" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 --triggers "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +1/foo:{'submitted': 'submitted', 'started': 'started', 'succeeded': 'succeeded', 'x': 'hello'} +2/foo:{} +1/bar:{'submitted': 'submitted', 'started': 'started', 'succeeded': 'succeeded'} +__END__ + +TEST_NAME="${TEST_NAME_BASE}_poll_fail" +run_fail "${TEST_NAME}" cylc workflow-state --max-polls=2 --interval=1 "${WORKFLOW_NAME}//2/foo:succeeded" + +contains_ok "${TEST_NAME}.stderr" <<__END__ +ERROR - failed after 2 polls +__END__ + +# finish the run +TEST_NAME="${TEST_NAME_BASE}_run_2" +workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach "${WORKFLOW_NAME}" + +TEST_NAME="${TEST_NAME_BASE}_poll_succeed" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//2/foo:succeeded" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2/foo:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_int_offset" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//1/foo:succeeded" --offset=P1 + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2/foo:succeeded 
+__END__ + +TEST_NAME="${TEST_NAME_BASE}_wildcard_offset" +run_fail "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//*/foo:succeeded" --offset=P1 + +contains_ok "${TEST_NAME}.stderr" <<__END__ +InputError: Cycle point "*" is not compatible with an offset. +__END__ + +purge diff --git a/tests/functional/workflow-state/09-datetime.t b/tests/functional/workflow-state/09-datetime.t new file mode 100755 index 00000000000..d6e82a6a6ac --- /dev/null +++ b/tests/functional/workflow-state/09-datetime.t @@ -0,0 +1,121 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +. 
"$(dirname "$0")/test_header" + +set_test_number 24 + +install_workflow "${TEST_NAME_BASE}" datetime + +# run one cycle +TEST_NAME="${TEST_NAME_BASE}_run_1" +workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach --stopcp=2051 "${WORKFLOW_NAME}" + +TEST_NAME="${TEST_NAME_BASE}_check_1_status" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2052/foo:waiting +2051/foo:succeeded +2051/bar:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_check_1_status_old_fmt" +run_ok "${TEST_NAME}" cylc workflow-state --old-format --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +foo, 2052, waiting +foo, 2051, succeeded +bar, 2051, succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_check_1_outputs" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 --triggers "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:{'submitted': 'submitted', 'started': 'started', 'succeeded': 'succeeded', 'x': 'hello'} +2052/foo:{} +2051/bar:{'submitted': 'submitted', 'started': 'started', 'succeeded': 'succeeded'} +__END__ + +TEST_NAME="${TEST_NAME_BASE}_poll_fail" +run_fail "${TEST_NAME}" cylc workflow-state --max-polls=2 --interval=1 "${WORKFLOW_NAME}//2052/foo:succeeded" + +contains_ok "${TEST_NAME}.stderr" <<__END__ +ERROR - failed after 2 polls +__END__ + +# finish the run +TEST_NAME="${TEST_NAME_BASE}_run_2" +workflow_run_ok "${TEST_NAME}" cylc play --debug --no-detach "${WORKFLOW_NAME}" + +TEST_NAME="${TEST_NAME_BASE}_check_1_status_2" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:succeeded +2052/foo:succeeded +2051/bar:succeeded +2052/bar:succeeded +2052/foo:succeeded(flows=2) +2052/bar:succeeded(flows=2) +__END__ + +TEST_NAME="${TEST_NAME_BASE}_check_1_status_3" +run_ok "${TEST_NAME}" cylc workflow-state --flow=2 --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok 
"${TEST_NAME}.stdout" <<__END__ +2052/foo:succeeded(flows=2) +2052/bar:succeeded(flows=2) +__END__ + +TEST_NAME="${TEST_NAME_BASE}_check_1_wildcard" +run_ok "${TEST_NAME}" cylc workflow-state --flow=1 --max-polls=1 "${WORKFLOW_NAME}//*/foo" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_poll_succeed" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//2052/foo:succeeded" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2052/foo:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_datetime_offset" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//2051/foo:succeeded" --offset=P1Y + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2052/foo:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_datetime_format" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//20510101T0000Z/foo:succeeded" --offset=P1Y + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2052/foo:succeeded +__END__ + +TEST_NAME="${TEST_NAME_BASE}_bad_point" +run_fail "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}//205/foo:succeeded" + +contains_ok "${TEST_NAME}.stderr" <<__END__ +InputError: Cycle point "205" is not compatible with DB point format "CCYY" +__END__ + +purge diff --git a/tests/functional/workflow-state/10-backcompat.t b/tests/functional/workflow-state/10-backcompat.t new file mode 100755 index 00000000000..8a026c97c1e --- /dev/null +++ b/tests/functional/workflow-state/10-backcompat.t @@ -0,0 +1,54 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +. "$(dirname "$0")/test_header" + +set_test_number 8 + +install_workflow "${TEST_NAME_BASE}" backcompat + +# create Cylc 7 DB +run_ok "create-db" sqlite3 "${WORKFLOW_RUN_DIR}/log/db" < schema-1.sql + +TEST_NAME="${TEST_NAME_BASE}_compat_1" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:succeeded +2051/bar:succeeded +__END__ + +# recreate Cylc 7 DB with one NULL status +rm "${WORKFLOW_RUN_DIR}/log/db" +run_ok "create-db" sqlite3 "${WORKFLOW_RUN_DIR}/log/db" < schema-2.sql + +TEST_NAME="${TEST_NAME_BASE}_compat_2" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:succeeded +__END__ + +# Cylc 7 DB only contains custom outputs +TEST_NAME="${TEST_NAME_BASE}_outputs" +run_ok "${TEST_NAME}" cylc workflow-state --max-polls=1 --messages "${WORKFLOW_NAME}" + +contains_ok "${TEST_NAME}.stdout" <<__END__ +2051/foo:{'x': 'the quick brown fox'} +__END__ + +purge diff --git a/tests/functional/workflow-state/11-multi.t b/tests/functional/workflow-state/11-multi.t new file mode 100644 index 00000000000..a80b61f2016 --- /dev/null +++ b/tests/functional/workflow-state/11-multi.t @@ -0,0 +1,130 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- +# Test all kinds of workflow-state DB checking. + +# shellcheck disable=SC2086 + +. "$(dirname "$0")/test_header" + +set_test_number 42 + +install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" + +# Create Cylc 7, 8 (pre-8.3.0), and 8(8.3.0+) DBs for workflow-state checking. +DBDIR="${WORKFLOW_RUN_DIR}/dbs" +for x in c7 c8a c8b; do + mkdir -p "${DBDIR}/${x}/log" + sqlite3 "${DBDIR}/${x}/log/db" < "${x}.sql" +done + +run_ok "${TEST_NAME_BASE}-validate" \ + cylc validate "${WORKFLOW_NAME}" --set="ALT=\"${DBDIR}\"" + +grep_ok \ + "WARNING - (8.3.0) Deprecated function signature used for workflow_state xtrigger was automatically upgraded" \ + "${TEST_NAME_BASE}-validate.stderr" + +TEST_NAME="${TEST_NAME_BASE}-run" +workflow_run_ok "${TEST_NAME}" \ + cylc play "${WORKFLOW_NAME}" --set="ALT=\"${DBDIR}\"" \ + --reference-test --debug --no-detach + +# Single poll. +CMD="cylc workflow-state --run-dir=$DBDIR --max-polls=1" + +# Content of the c8b DB: +# "select * from task_outputs" +# 1|foo|[1]|{"submitted": "submitted", "started": "started", "succeeded": "succeeded", "x": "the quick brown"} +# "select * from task_states" +# foo|1|[1]|2024-06-05T16:34:02+12:00|2024-06-05T16:34:04+12:00|1|succeeded|0|0 + +#--------------- +# Test the new-format command line (pre-8.3.0). 
+T=${TEST_NAME_BASE}-cli-c8b +run_ok "${T}-1" $CMD c8b +run_ok "${T}-2" $CMD c8b//1 +run_ok "${T}-3" $CMD c8b//1/foo +run_ok "${T}-4" $CMD c8b//1/foo:succeeded +run_ok "${T}-5" $CMD "c8b//1/foo:the quick brown" --messages +run_ok "${T}-6" $CMD "c8b//1/foo:x" --triggers +run_ok "${T}-7" $CMD "c8b//1/foo:x" # default to trigger if not a status +run_ok "${T}-8" $CMD c8b//1 +run_ok "${T}-9" $CMD c8b//1:succeeded + +run_fail "${T}-3" $CMD c8b//1/foo:failed +run_fail "${T}-5" $CMD "c8b//1/foo:the quick brown" --triggers +run_fail "${T}-5" $CMD "c8b//1/foo:x" --messages +run_fail "${T}-1" $CMD c8b//1:failed +run_fail "${T}-1" $CMD c8b//2 +run_fail "${T}-1" $CMD c8b//2:failed + +#--------------- +T=${TEST_NAME_BASE}-cli-c8a +run_ok "${T}-1" $CMD "c8a//1/foo:the quick brown" --messages +run_ok "${T}-2" $CMD "c8a//1/foo:the quick brown" --triggers # OK for 8.0 <= 8.3 +run_fail "${T}-3" $CMD "c8a//1/foo:x" --triggers # not possible for 8.0 <= 8.3 + +#--------------- +T=${TEST_NAME_BASE}-cli-c7 +run_ok "${T}-1" $CMD "c7//1/foo:the quick brown" --messages +run_fail "${T}-2" $CMD "c7//1/foo:the quick brown" --triggers +run_ok "${T}-3" $CMD "c7//1/foo:x" --triggers + +#--------------- +# Test the old-format command line (8.3.0+). 
+T=${TEST_NAME_BASE}-cli-8b-compat +run_ok "${T}-1" $CMD c8b +run_ok "${T}-2" $CMD c8b --point=1 +run_ok "${T}-3" $CMD c8b --point=1 --task=foo +run_ok "${T}-4" $CMD c8b --point=1 --task=foo --status=succeeded +run_ok "${T}-5" $CMD c8b --point=1 --task=foo --message="the quick brown" +run_ok "${T}-6" $CMD c8b --point=1 --task=foo --output="the quick brown" + +run_fail "${T}-7" $CMD c8b --point=1 --task=foo --status=failed +run_fail "${T}-8" $CMD c8b --point=1 --task=foo --message="x" +run_fail "${T}-9" $CMD c8b --point=1 --task=foo --output="x" +run_fail "${T}-10" $CMD c8b --point=2 +run_fail "${T}-11" $CMD c8b --point=2 --task=foo --status="succeeded" + +#--------------- +T=${TEST_NAME_BASE}-bad-cli + +TEST_NAME="${T}-1" +run_fail "$TEST_NAME" $CMD c8b --status=succeeded --message="the quick brown" +cmp_ok "${TEST_NAME}.stderr" <<__ERR__ +InputError: set --status or --message, not both. +__ERR__ + +TEST_NAME="${T}-2" +run_fail "$TEST_NAME" $CMD c8b --task-point --point=1 +cmp_ok "${TEST_NAME}.stderr" <<__ERR__ +InputError: set --task-point or --point=CYCLE, not both. 
+__ERR__ + + +TEST_NAME="${T}-3" +run_fail "$TEST_NAME" $CMD c8b --task-point +cmp_ok "${TEST_NAME}.stderr" << "__ERR__" +InputError: --task-point: $CYLC_TASK_CYCLE_POINT is not defined +__ERR__ + +export CYLC_TASK_CYCLE_POINT=1 +TEST_NAME="${T}-3" +run_ok "$TEST_NAME" $CMD c8b --task-point + +purge diff --git a/tests/functional/workflow-state/11-multi/c7.sql b/tests/functional/workflow-state/11-multi/c7.sql new file mode 100644 index 00000000000..e912d533992 --- /dev/null +++ b/tests/functional/workflow-state/11-multi/c7.sql @@ -0,0 +1,39 @@ +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE suite_params(key TEXT, value TEXT, PRIMARY KEY(key)); +INSERT INTO suite_params VALUES('uuid_str','814ef90e-31a2-45e7-904b-fb3c6dcb87a9'); +INSERT INTO suite_params VALUES('run_mode','live'); +INSERT INTO suite_params VALUES('cylc_version','7.9.9'); +INSERT INTO suite_params VALUES('UTC_mode','0'); +INSERT INTO suite_params VALUES('cycle_point_tz','+1200'); +CREATE TABLE task_jobs(cycle TEXT, name TEXT, submit_num INTEGER, is_manual_submit INTEGER, try_num INTEGER, time_submit TEXT, time_submit_exit TEXT, submit_status INTEGER, time_run TEXT, time_run_exit TEXT, run_signal TEXT, run_status INTEGER, user_at_host TEXT, batch_sys_name TEXT, batch_sys_job_id TEXT, PRIMARY KEY(cycle, name, submit_num)); +INSERT INTO task_jobs VALUES('1','foo',1,0,1,'2024-06-05T16:31:01+12:00','2024-06-05T16:31:02+12:00',0,'2024-06-05T16:31:02+12:00','2024-06-05T16:31:02+12:00',NULL,0,'NIWA-1022450.niwa.local','background','19328'); +CREATE TABLE task_late_flags(cycle TEXT, name TEXT, value INTEGER, PRIMARY KEY(cycle, name)); +CREATE TABLE broadcast_states_checkpoints(id INTEGER, point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(id, point, namespace, key)); +CREATE TABLE checkpoint_id(id INTEGER, time TEXT, event TEXT, PRIMARY KEY(id)); +INSERT INTO checkpoint_id VALUES(0,'2024-06-05T16:31:02+12:00','latest'); +CREATE TABLE inheritance(namespace TEXT, inheritance TEXT, PRIMARY 
KEY(namespace)); +INSERT INTO inheritance VALUES('root','["root"]'); +INSERT INTO inheritance VALUES('foo','["foo", "root"]'); +CREATE TABLE suite_params_checkpoints(id INTEGER, key TEXT, value TEXT, PRIMARY KEY(id, key)); +CREATE TABLE task_pool_checkpoints(id INTEGER, cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(id, cycle, name)); +CREATE TABLE task_outputs(cycle TEXT, name TEXT, outputs TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_outputs VALUES('1','foo','{"x": "the quick brown"}'); +CREATE TABLE broadcast_states(point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(point, namespace, key)); +CREATE TABLE task_timeout_timers(cycle TEXT, name TEXT, timeout REAL, PRIMARY KEY(cycle, name)); +CREATE TABLE task_states(name TEXT, cycle TEXT, time_created TEXT, time_updated TEXT, submit_num INTEGER, status TEXT, PRIMARY KEY(name, cycle)); +INSERT INTO task_states VALUES('foo','1','2024-06-05T16:31:01+12:00','2024-06-05T16:31:02+12:00',1,'succeeded'); +CREATE TABLE broadcast_events(time TEXT, change TEXT, point TEXT, namespace TEXT, key TEXT, value TEXT); +CREATE TABLE task_events(name TEXT, cycle TEXT, time TEXT, submit_num INTEGER, event TEXT, message TEXT); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:31:02+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:31:02+12:00',1,'started',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:31:02+12:00',1,'x','the quick brown'); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:31:02+12:00',1,'succeeded',''); +CREATE TABLE suite_template_vars(key TEXT, value TEXT, PRIMARY KEY(key)); +CREATE TABLE task_pool(cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_pool VALUES('1','foo',1,'succeeded',NULL); +CREATE TABLE xtriggers(signature TEXT, results TEXT, PRIMARY KEY(signature)); +CREATE TABLE task_action_timers(cycle TEXT, name TEXT, ctx_key TEXT, ctx TEXT, 
delays TEXT, num INTEGER, delay TEXT, timeout TEXT, PRIMARY KEY(cycle, name, ctx_key)); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "submit-retrying"]','null','[]',0,NULL,NULL); +COMMIT; diff --git a/tests/functional/workflow-state/11-multi/c8a.sql b/tests/functional/workflow-state/11-multi/c8a.sql new file mode 100644 index 00000000000..3335d4dd3aa --- /dev/null +++ b/tests/functional/workflow-state/11-multi/c8a.sql @@ -0,0 +1,48 @@ +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE absolute_outputs(cycle TEXT, name TEXT, output TEXT); +CREATE TABLE broadcast_events(time TEXT, change TEXT, point TEXT, namespace TEXT, key TEXT, value TEXT); +CREATE TABLE broadcast_states(point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(point, namespace, key)); +CREATE TABLE inheritance(namespace TEXT, inheritance TEXT, PRIMARY KEY(namespace)); +INSERT INTO inheritance VALUES('root','["root"]'); +INSERT INTO inheritance VALUES('foo','["foo", "root"]'); +CREATE TABLE task_action_timers(cycle TEXT, name TEXT, ctx_key TEXT, ctx TEXT, delays TEXT, num INTEGER, delay TEXT, timeout TEXT, PRIMARY KEY(cycle, name, ctx_key)); +INSERT INTO task_action_timers VALUES('1','foo','"poll_timer"','["tuple", [[1, "running"]]]','[900.0]',1,'900.0','1717563116.69952'); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "submission-retry"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "execution-retry"]','null','[]',0,NULL,NULL); +CREATE TABLE task_events(name TEXT, cycle TEXT, time TEXT, submit_num INTEGER, event TEXT, message TEXT); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:36:56+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:36:56+12:00',1,'started',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:36:56+12:00',1,'x','the quick 
brown'); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:36:57+12:00',1,'succeeded',''); +CREATE TABLE task_jobs(cycle TEXT, name TEXT, submit_num INTEGER, flow_nums TEXT, is_manual_submit INTEGER, try_num INTEGER, time_submit TEXT, time_submit_exit TEXT, submit_status INTEGER, time_run TEXT, time_run_exit TEXT, run_signal TEXT, run_status INTEGER, platform_name TEXT, job_runner_name TEXT, job_id TEXT, PRIMARY KEY(cycle, name, submit_num)); +INSERT INTO task_jobs VALUES('1','foo',1,'[1]',0,1,'2024-06-05T16:36:55+12:00','2024-06-05T16:36:56+12:00',0,'2024-06-05T16:36:56+12:00','2024-06-05T16:36:56+12:00',NULL,0,'localhost','background','21511'); +CREATE TABLE task_late_flags(cycle TEXT, name TEXT, value INTEGER, PRIMARY KEY(cycle, name)); +CREATE TABLE task_outputs(cycle TEXT, name TEXT, flow_nums TEXT, outputs TEXT, PRIMARY KEY(cycle, name, flow_nums)); +INSERT INTO task_outputs VALUES('1','foo','[1]','["submitted", "started", "succeeded", "the quick brown"]'); +CREATE TABLE task_pool(cycle TEXT, name TEXT, flow_nums TEXT, status TEXT, is_held INTEGER, PRIMARY KEY(cycle, name, flow_nums)); +CREATE TABLE task_prerequisites(cycle TEXT, name TEXT, flow_nums TEXT, prereq_name TEXT, prereq_cycle TEXT, prereq_output TEXT, satisfied TEXT, PRIMARY KEY(cycle, name, flow_nums, prereq_name, prereq_cycle, prereq_output)); +CREATE TABLE task_states(name TEXT, cycle TEXT, flow_nums TEXT, time_created TEXT, time_updated TEXT, submit_num INTEGER, status TEXT, flow_wait INTEGER, is_manual_submit INTEGER, PRIMARY KEY(name, cycle, flow_nums)); +INSERT INTO task_states VALUES('foo','1','[1]','2024-06-05T16:36:55+12:00','2024-06-05T16:36:57+12:00',1,'succeeded',0,0); +CREATE TABLE task_timeout_timers(cycle TEXT, name TEXT, timeout REAL, PRIMARY KEY(cycle, name)); +CREATE TABLE tasks_to_hold(name TEXT, cycle TEXT); +CREATE TABLE workflow_flows(flow_num INTEGER, start_time TEXT, description TEXT, PRIMARY KEY(flow_num)); +INSERT INTO workflow_flows 
VALUES(1,'2024-06-05T16:36:55','original flow from 1'); +CREATE TABLE workflow_params(key TEXT, value TEXT, PRIMARY KEY(key)); +INSERT INTO workflow_params VALUES('uuid_str','cabb2bd8-bb36-4c7a-9c51-d2b1d456bc4e'); +INSERT INTO workflow_params VALUES('cylc_version','8.3.0.dev'); +INSERT INTO workflow_params VALUES('UTC_mode','0'); +INSERT INTO workflow_params VALUES('n_restart','0'); +INSERT INTO workflow_params VALUES('cycle_point_format',NULL); +INSERT INTO workflow_params VALUES('is_paused','0'); +INSERT INTO workflow_params VALUES('stop_clock_time',NULL); +INSERT INTO workflow_params VALUES('stop_task',NULL); +INSERT INTO workflow_params VALUES('icp',NULL); +INSERT INTO workflow_params VALUES('fcp',NULL); +INSERT INTO workflow_params VALUES('startcp',NULL); +INSERT INTO workflow_params VALUES('stopcp',NULL); +INSERT INTO workflow_params VALUES('run_mode',NULL); +INSERT INTO workflow_params VALUES('cycle_point_tz','+1200'); +CREATE TABLE workflow_template_vars(key TEXT, value TEXT, PRIMARY KEY(key)); +CREATE TABLE xtriggers(signature TEXT, results TEXT, PRIMARY KEY(signature)); +COMMIT; diff --git a/tests/functional/workflow-state/11-multi/c8b.sql b/tests/functional/workflow-state/11-multi/c8b.sql new file mode 100644 index 00000000000..ca8fe74fa6f --- /dev/null +++ b/tests/functional/workflow-state/11-multi/c8b.sql @@ -0,0 +1,48 @@ +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE absolute_outputs(cycle TEXT, name TEXT, output TEXT); +CREATE TABLE broadcast_events(time TEXT, change TEXT, point TEXT, namespace TEXT, key TEXT, value TEXT); +CREATE TABLE broadcast_states(point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(point, namespace, key)); +CREATE TABLE inheritance(namespace TEXT, inheritance TEXT, PRIMARY KEY(namespace)); +INSERT INTO inheritance VALUES('root','["root"]'); +INSERT INTO inheritance VALUES('foo','["foo", "root"]'); +CREATE TABLE task_action_timers(cycle TEXT, name TEXT, ctx_key TEXT, ctx TEXT, delays TEXT, num INTEGER, 
delay TEXT, timeout TEXT, PRIMARY KEY(cycle, name, ctx_key)); +INSERT INTO task_action_timers VALUES('1','foo','"poll_timer"','["tuple", [[1, "running"]]]','[900.0]',1,'900.0','1717562943.77014'); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "submission-retry"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('1','foo','["try_timers", "execution-retry"]','null','[]',0,NULL,NULL); +CREATE TABLE task_events(name TEXT, cycle TEXT, time TEXT, submit_num INTEGER, event TEXT, message TEXT); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:34:03+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:34:03+12:00',1,'started',''); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:34:03+12:00',1,'x','the quick brown'); +INSERT INTO task_events VALUES('foo','1','2024-06-05T16:34:04+12:00',1,'succeeded',''); +CREATE TABLE task_jobs(cycle TEXT, name TEXT, submit_num INTEGER, flow_nums TEXT, is_manual_submit INTEGER, try_num INTEGER, time_submit TEXT, time_submit_exit TEXT, submit_status INTEGER, time_run TEXT, time_run_exit TEXT, run_signal TEXT, run_status INTEGER, platform_name TEXT, job_runner_name TEXT, job_id TEXT, PRIMARY KEY(cycle, name, submit_num)); +INSERT INTO task_jobs VALUES('1','foo',1,'[1]',0,1,'2024-06-05T16:34:02+12:00','2024-06-05T16:34:03+12:00',0,'2024-06-05T16:34:03+12:00','2024-06-05T16:34:03+12:00',NULL,0,'localhost','background','20985'); +CREATE TABLE task_late_flags(cycle TEXT, name TEXT, value INTEGER, PRIMARY KEY(cycle, name)); +CREATE TABLE task_outputs(cycle TEXT, name TEXT, flow_nums TEXT, outputs TEXT, PRIMARY KEY(cycle, name, flow_nums)); +INSERT INTO task_outputs VALUES('1','foo','[1]','{"submitted": "submitted", "started": "started", "succeeded": "succeeded", "x": "the quick brown"}'); +CREATE TABLE task_pool(cycle TEXT, name TEXT, flow_nums TEXT, status TEXT, is_held INTEGER, PRIMARY KEY(cycle, name, flow_nums)); +CREATE TABLE task_prerequisites(cycle TEXT, name 
TEXT, flow_nums TEXT, prereq_name TEXT, prereq_cycle TEXT, prereq_output TEXT, satisfied TEXT, PRIMARY KEY(cycle, name, flow_nums, prereq_name, prereq_cycle, prereq_output)); +CREATE TABLE task_states(name TEXT, cycle TEXT, flow_nums TEXT, time_created TEXT, time_updated TEXT, submit_num INTEGER, status TEXT, flow_wait INTEGER, is_manual_submit INTEGER, PRIMARY KEY(name, cycle, flow_nums)); +INSERT INTO task_states VALUES('foo','1','[1]','2024-06-05T16:34:02+12:00','2024-06-05T16:34:04+12:00',1,'succeeded',0,0); +CREATE TABLE task_timeout_timers(cycle TEXT, name TEXT, timeout REAL, PRIMARY KEY(cycle, name)); +CREATE TABLE tasks_to_hold(name TEXT, cycle TEXT); +CREATE TABLE workflow_flows(flow_num INTEGER, start_time TEXT, description TEXT, PRIMARY KEY(flow_num)); +INSERT INTO workflow_flows VALUES(1,'2024-06-05T16:34:02','original flow from 1'); +CREATE TABLE workflow_params(key TEXT, value TEXT, PRIMARY KEY(key)); +INSERT INTO workflow_params VALUES('uuid_str','4185a45a-8faa-491f-ad35-2d221e780efa'); +INSERT INTO workflow_params VALUES('cylc_version','8.3.0.dev'); +INSERT INTO workflow_params VALUES('UTC_mode','0'); +INSERT INTO workflow_params VALUES('n_restart','0'); +INSERT INTO workflow_params VALUES('cycle_point_format',NULL); +INSERT INTO workflow_params VALUES('is_paused','0'); +INSERT INTO workflow_params VALUES('stop_clock_time',NULL); +INSERT INTO workflow_params VALUES('stop_task',NULL); +INSERT INTO workflow_params VALUES('icp',NULL); +INSERT INTO workflow_params VALUES('fcp',NULL); +INSERT INTO workflow_params VALUES('startcp',NULL); +INSERT INTO workflow_params VALUES('stopcp',NULL); +INSERT INTO workflow_params VALUES('run_mode',NULL); +INSERT INTO workflow_params VALUES('cycle_point_tz','+1200'); +CREATE TABLE workflow_template_vars(key TEXT, value TEXT, PRIMARY KEY(key)); +CREATE TABLE xtriggers(signature TEXT, results TEXT, PRIMARY KEY(signature)); +COMMIT; diff --git a/tests/functional/workflow-state/11-multi/flow.cylc 
b/tests/functional/workflow-state/11-multi/flow.cylc new file mode 100644 index 00000000000..a0ab61e9312 --- /dev/null +++ b/tests/functional/workflow-state/11-multi/flow.cylc @@ -0,0 +1,69 @@ +#!Jinja2 + +{# alt-cylc-run-dir default for easy validation #} +{% set ALT = ALT | default("alt") %} + +[scheduling] + cycling mode = integer + initial cycle point = 1 + final cycle point = 2 + [[xtriggers]] + # Cylc 7 back compat + z1 = suite_state(c7, foo, 1, offset=P0, cylc_run_dir={{ALT}}):PT1S # status=succeeded + z2 = suite_state(c7, foo, 1, offset=P0, message="the quick brown", cylc_run_dir={{ALT}}):PT1S + + # Cylc 7 xtrigger, Cylc 8 DB + a1 = suite_state(c8b, foo, 1, offset=P0, cylc_run_dir={{ALT}}):PT1S # status=succeeded + a2 = suite_state(c8b, foo, 1, offset=P0, message="the quick brown", cylc_run_dir={{ALT}}):PT1S + + # Cylc 8 back compat (pre-8.3.0) + b1 = workflow_state(c8a, foo, 1, offset=P0, status=succeeded, cylc_run_dir={{ALT}}):PT1S + b2 = workflow_state(c8a, foo, 1, offset=P0, message="the quick brown", cylc_run_dir={{ALT}}):PT1S + + # Cylc 8 new (from 8.3.0) + c1 = workflow_state(c8b//1/foo, offset=P0, alt_cylc_run_dir={{ALT}}):PT1S + c2 = workflow_state(c8b//1/foo:succeeded, offset=P0, alt_cylc_run_dir={{ALT}}):PT1S + c3 = workflow_state(c8b//1/foo:x, offset=P0, alt_cylc_run_dir={{ALT}}):PT1S + c4 = workflow_state(c8b//1/foo:"the quick brown", offset=P0, is_message=True, alt_cylc_run_dir={{ALT}}):PT1S + + [[graph]] + R1 = """ + # Deprecated workflow-state polling tasks. 
+ # (does not support %(suite_name)s templates or offsets + # or output triggers - just messages) + + # status + bar1 => g1 + bar2 => g2 + + # output + baz2 => g4 # message given in task definition + qux2 => g7 # message given in task definition + + @z1 => x1 + @z2 => x2 + + @a1 => f1 + @a2 => f2 + + @b1 => f3 + @b2 => f4 + + @c1 => f5 + @c2 => f6 + @c3 => f7 + + """ +[runtime] + [[bar1, bar2]] + [[[workflow state polling]]] + alt-cylc-run-dir = {{ALT}} + + [[qux2, baz2]] + [[[workflow state polling]]] + message = "the quick brown" + alt-cylc-run-dir = {{ALT}} + + [[x1, x2]] + [[f1, f2, f3, f4, f5, f6, f7]] + [[g1, g2, g4, g7]] diff --git a/tests/functional/workflow-state/11-multi/reference.log b/tests/functional/workflow-state/11-multi/reference.log new file mode 100644 index 00000000000..5f1e79866b6 --- /dev/null +++ b/tests/functional/workflow-state/11-multi/reference.log @@ -0,0 +1,17 @@ +1/bar1 -triggered off [] in flow 1 +1/qux2 -triggered off [] in flow 1 +1/bar2 -triggered off [] in flow 1 +1/baz2 -triggered off [] in flow 1 +1/f4 -triggered off [] in flow 1 +1/f1 -triggered off [] in flow 1 +1/f2 -triggered off [] in flow 1 +1/f3 -triggered off [] in flow 1 +1/f5 -triggered off [] in flow 1 +1/x1 -triggered off [] in flow 1 +1/f6 -triggered off [] in flow 1 +1/f7 -triggered off [] in flow 1 +1/x2 -triggered off [] in flow 1 +1/g4 -triggered off ['1/baz2'] in flow 1 +1/g2 -triggered off ['1/bar2'] in flow 1 +1/g7 -triggered off ['1/qux2'] in flow 1 +1/g1 -triggered off ['1/bar1'] in flow 1 diff --git a/tests/functional/workflow-state/11-multi/upstream/suite.rc b/tests/functional/workflow-state/11-multi/upstream/suite.rc new file mode 100644 index 00000000000..250e0655b7d --- /dev/null +++ b/tests/functional/workflow-state/11-multi/upstream/suite.rc @@ -0,0 +1,17 @@ +# Run this with Cylc 7, 8 (pre-8.3.0), and 8 (8.3.0+) +# to generate DBs for workflow state checks. +# (The task_outputs table is different in each case). 
+ +[scheduling] + cycling mode = integer + initial cycle point = 1 + [[dependencies]] + [[[R1]]] + graph = """ + foo + """ +[runtime] + [[foo]] + script = "cylc message - 'the quick brown'" + [[[outputs]]] + x = "the quick brown" diff --git a/tests/functional/workflow-state/backcompat/schema-1.sql b/tests/functional/workflow-state/backcompat/schema-1.sql new file mode 100644 index 00000000000..2375c9fd0b1 --- /dev/null +++ b/tests/functional/workflow-state/backcompat/schema-1.sql @@ -0,0 +1,49 @@ +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE suite_params(key TEXT, value TEXT, PRIMARY KEY(key)); +INSERT INTO suite_params VALUES('uuid_str','0d0bf7e8-4543-4aeb-8bc6-397e3a03ee19'); +INSERT INTO suite_params VALUES('run_mode','live'); +INSERT INTO suite_params VALUES('cylc_version','7.9.9'); +INSERT INTO suite_params VALUES('UTC_mode','0'); +INSERT INTO suite_params VALUES('cycle_point_format','CCYY'); +INSERT INTO suite_params VALUES('cycle_point_tz','+1200'); +CREATE TABLE task_jobs(cycle TEXT, name TEXT, submit_num INTEGER, is_manual_submit INTEGER, try_num INTEGER, time_submit TEXT, time_submit_exit TEXT, submit_status INTEGER, time_run TEXT, time_run_exit TEXT, run_signal TEXT, run_status INTEGER, user_at_host TEXT, batch_sys_name TEXT, batch_sys_job_id TEXT, PRIMARY KEY(cycle, name, submit_num)); +INSERT INTO task_jobs VALUES('2051','foo',1,0,1,'2024-05-30T14:11:40+12:00','2024-05-30T14:11:40+12:00',0,'2024-05-30T14:11:40+12:00','2024-05-30T14:11:40+12:00',NULL,0,'NIWA-1022450.niwa.local','background','12272'); +INSERT INTO task_jobs VALUES('2051','bar',1,0,1,'2024-05-30T14:11:42+12:00','2024-05-30T14:11:42+12:00',0,'2024-05-30T14:11:42+12:00','2024-05-30T14:11:42+12:00',NULL,0,'NIWA-1022450.niwa.local','background','12327'); +CREATE TABLE task_late_flags(cycle TEXT, name TEXT, value INTEGER, PRIMARY KEY(cycle, name)); +CREATE TABLE broadcast_states_checkpoints(id INTEGER, point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(id, point, 
namespace, key)); +CREATE TABLE checkpoint_id(id INTEGER, time TEXT, event TEXT, PRIMARY KEY(id)); +INSERT INTO checkpoint_id VALUES(0,'2024-05-30T14:11:43+12:00','latest'); +CREATE TABLE inheritance(namespace TEXT, inheritance TEXT, PRIMARY KEY(namespace)); +INSERT INTO inheritance VALUES('root','["root"]'); +INSERT INTO inheritance VALUES('foo','["foo", "root"]'); +INSERT INTO inheritance VALUES('bar','["bar", "root"]'); +CREATE TABLE suite_params_checkpoints(id INTEGER, key TEXT, value TEXT, PRIMARY KEY(id, key)); +CREATE TABLE task_pool_checkpoints(id INTEGER, cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(id, cycle, name)); +CREATE TABLE task_outputs(cycle TEXT, name TEXT, outputs TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_outputs VALUES('2051','foo','{"x": "the quick brown fox"}'); +CREATE TABLE broadcast_states(point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(point, namespace, key)); +CREATE TABLE task_timeout_timers(cycle TEXT, name TEXT, timeout REAL, PRIMARY KEY(cycle, name)); +CREATE TABLE task_states(name TEXT, cycle TEXT, time_created TEXT, time_updated TEXT, submit_num INTEGER, status TEXT, PRIMARY KEY(name, cycle)); +INSERT INTO task_states VALUES('foo','2051','2024-05-30T14:11:40+12:00','2024-05-30T14:11:41+12:00',1,'succeeded'); +INSERT INTO task_states VALUES('bar','2051','2024-05-30T14:11:40+12:00','2024-05-30T14:11:43+12:00',1,'succeeded'); +CREATE TABLE broadcast_events(time TEXT, change TEXT, point TEXT, namespace TEXT, key TEXT, value TEXT); +CREATE TABLE task_events(name TEXT, cycle TEXT, time TEXT, submit_num INTEGER, event TEXT, message TEXT); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'started',''); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'x','the quick brown fox'); +INSERT INTO task_events 
VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'succeeded',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'started',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'succeeded',''); +CREATE TABLE suite_template_vars(key TEXT, value TEXT, PRIMARY KEY(key)); +CREATE TABLE task_pool(cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_pool VALUES('2051','foo',1,'succeeded',NULL); +INSERT INTO task_pool VALUES('2051','bar',1,'succeeded',NULL); +CREATE TABLE xtriggers(signature TEXT, results TEXT, PRIMARY KEY(signature)); +CREATE TABLE task_action_timers(cycle TEXT, name TEXT, ctx_key TEXT, ctx TEXT, delays TEXT, num INTEGER, delay TEXT, timeout TEXT, PRIMARY KEY(cycle, name, ctx_key)); +INSERT INTO task_action_timers VALUES('2051','foo','["try_timers", "retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','foo','["try_timers", "submit-retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','bar','["try_timers", "retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','bar','["try_timers", "submit-retrying"]','null','[]',0,NULL,NULL); +COMMIT; diff --git a/tests/functional/workflow-state/backcompat/schema-2.sql b/tests/functional/workflow-state/backcompat/schema-2.sql new file mode 100644 index 00000000000..ff12c2e9fdd --- /dev/null +++ b/tests/functional/workflow-state/backcompat/schema-2.sql @@ -0,0 +1,49 @@ +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE suite_params(key TEXT, value TEXT, PRIMARY KEY(key)); +INSERT INTO suite_params VALUES('uuid_str','0d0bf7e8-4543-4aeb-8bc6-397e3a03ee19'); +INSERT INTO suite_params VALUES('run_mode','live'); +INSERT INTO suite_params VALUES('cylc_version','7.9.9'); +INSERT INTO suite_params VALUES('UTC_mode','0'); 
+INSERT INTO suite_params VALUES('cycle_point_format','CCYY'); +INSERT INTO suite_params VALUES('cycle_point_tz','+1200'); +CREATE TABLE task_jobs(cycle TEXT, name TEXT, submit_num INTEGER, is_manual_submit INTEGER, try_num INTEGER, time_submit TEXT, time_submit_exit TEXT, submit_status INTEGER, time_run TEXT, time_run_exit TEXT, run_signal TEXT, run_status INTEGER, user_at_host TEXT, batch_sys_name TEXT, batch_sys_job_id TEXT, PRIMARY KEY(cycle, name, submit_num)); +INSERT INTO task_jobs VALUES('2051','foo',1,0,1,'2024-05-30T14:11:40+12:00','2024-05-30T14:11:40+12:00',0,'2024-05-30T14:11:40+12:00','2024-05-30T14:11:40+12:00',NULL,0,'NIWA-1022450.niwa.local','background','12272'); +INSERT INTO task_jobs VALUES('2051','bar',1,0,1,'2024-05-30T14:11:42+12:00','2024-05-30T14:11:42+12:00',0,'2024-05-30T14:11:42+12:00','2024-05-30T14:11:42+12:00',NULL,0,'NIWA-1022450.niwa.local','background','12327'); +CREATE TABLE task_late_flags(cycle TEXT, name TEXT, value INTEGER, PRIMARY KEY(cycle, name)); +CREATE TABLE broadcast_states_checkpoints(id INTEGER, point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(id, point, namespace, key)); +CREATE TABLE checkpoint_id(id INTEGER, time TEXT, event TEXT, PRIMARY KEY(id)); +INSERT INTO checkpoint_id VALUES(0,'2024-05-30T14:11:43+12:00','latest'); +CREATE TABLE inheritance(namespace TEXT, inheritance TEXT, PRIMARY KEY(namespace)); +INSERT INTO inheritance VALUES('root','["root"]'); +INSERT INTO inheritance VALUES('foo','["foo", "root"]'); +INSERT INTO inheritance VALUES('bar','["bar", "root"]'); +CREATE TABLE suite_params_checkpoints(id INTEGER, key TEXT, value TEXT, PRIMARY KEY(id, key)); +CREATE TABLE task_pool_checkpoints(id INTEGER, cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(id, cycle, name)); +CREATE TABLE task_outputs(cycle TEXT, name TEXT, outputs TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_outputs VALUES('2051','foo','{"x": "the quick brown fox"}'); +CREATE TABLE 
broadcast_states(point TEXT, namespace TEXT, key TEXT, value TEXT, PRIMARY KEY(point, namespace, key)); +CREATE TABLE task_timeout_timers(cycle TEXT, name TEXT, timeout REAL, PRIMARY KEY(cycle, name)); +CREATE TABLE task_states(name TEXT, cycle TEXT, time_created TEXT, time_updated TEXT, submit_num INTEGER, status TEXT, PRIMARY KEY(name, cycle)); +INSERT INTO task_states VALUES('foo','2051','2024-05-30T14:11:40+12:00','2024-05-30T14:11:41+12:00',1,'succeeded'); +INSERT INTO task_states VALUES('bar','2051','2024-05-30T14:11:40+12:00','2024-05-30T14:11:43+12:00',1,NULL); +CREATE TABLE broadcast_events(time TEXT, change TEXT, point TEXT, namespace TEXT, key TEXT, value TEXT); +CREATE TABLE task_events(name TEXT, cycle TEXT, time TEXT, submit_num INTEGER, event TEXT, message TEXT); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'started',''); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'x','the quick brown fox'); +INSERT INTO task_events VALUES('foo','2051','2024-05-30T14:11:41+12:00',1,'succeeded',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'submitted',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'started',''); +INSERT INTO task_events VALUES('bar','2051','2024-05-30T14:11:43+12:00',1,'succeeded',''); +CREATE TABLE suite_template_vars(key TEXT, value TEXT, PRIMARY KEY(key)); +CREATE TABLE task_pool(cycle TEXT, name TEXT, spawned INTEGER, status TEXT, hold_swap TEXT, PRIMARY KEY(cycle, name)); +INSERT INTO task_pool VALUES('2051','foo',1,'succeeded',NULL); +INSERT INTO task_pool VALUES('2051','bar',1,'succeeded',NULL); +CREATE TABLE xtriggers(signature TEXT, results TEXT, PRIMARY KEY(signature)); +CREATE TABLE task_action_timers(cycle TEXT, name TEXT, ctx_key TEXT, ctx TEXT, delays TEXT, num INTEGER, delay TEXT, timeout TEXT, PRIMARY KEY(cycle, name, 
ctx_key)); +INSERT INTO task_action_timers VALUES('2051','foo','["try_timers", "retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','foo','["try_timers", "submit-retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','bar','["try_timers", "retrying"]','null','[]',0,NULL,NULL); +INSERT INTO task_action_timers VALUES('2051','bar','["try_timers", "submit-retrying"]','null','[]',0,NULL,NULL); +COMMIT; diff --git a/tests/functional/workflow-state/backcompat/suite.rc b/tests/functional/workflow-state/backcompat/suite.rc new file mode 100644 index 00000000000..2d9fdb846ea --- /dev/null +++ b/tests/functional/workflow-state/backcompat/suite.rc @@ -0,0 +1,16 @@ +[cylc] + cycle point format = CCYY +[scheduling] + initial cycle point = 2051 + [[dependencies]] + [[[R1]]] + graph = """ + foo:x => bar + """ +[runtime] + [[foo]] + script = "cylc message 'the quick brown fox'" + [[[outputs]]] + x = "the quick brown fox" + [[bar]] + diff --git a/tests/functional/workflow-state/datetime/flow.cylc b/tests/functional/workflow-state/datetime/flow.cylc new file mode 100644 index 00000000000..e00b4db3334 --- /dev/null +++ b/tests/functional/workflow-state/datetime/flow.cylc @@ -0,0 +1,21 @@ +[scheduler] + cycle point format = CCYY +[scheduling] + initial cycle point = 2051 + final cycle point = 2052 + [[graph]] + P1Y = """ + foo:x => bar + """ +[runtime] + [[foo]] + script = cylc message "hello" + [[[outputs]]] + x = "hello" + [[bar]] + script = """ + if (( CYLC_TASK_CYCLE_POINT == 2052 )) && (( CYLC_TASK_SUBMIT_NUMBER == 1 )) + then + cylc trigger --flow=new $CYLC_WORKFLOW_ID//2052/foo + fi + """ diff --git a/tests/functional/workflow-state/integer/flow.cylc b/tests/functional/workflow-state/integer/flow.cylc new file mode 100644 index 00000000000..3ca3eb462b8 --- /dev/null +++ b/tests/functional/workflow-state/integer/flow.cylc @@ -0,0 +1,14 @@ +[scheduling] + cycling mode = integer + initial cycle point = 1 + final cycle 
point = 2 + [[graph]] + P1 = """ + foo:x => bar + """ +[runtime] + [[foo]] + script = cylc message "hello" + [[[outputs]]] + x = "hello" + [[bar]] diff --git a/tests/functional/workflow-state/options/flow.cylc b/tests/functional/workflow-state/options/flow.cylc index a52bcc9970a..f1fa4000233 100644 --- a/tests/functional/workflow-state/options/flow.cylc +++ b/tests/functional/workflow-state/options/flow.cylc @@ -15,8 +15,8 @@ [[foo]] script = true [[env_polling]] - script = cylc workflow-state $CYLC_WORKFLOW_ID --task=foo --task-point -S succeeded + script = cylc workflow-state ${CYLC_WORKFLOW_ID}//$CYLC_TASK_CYCLE_POINT/foo:succeeded [[offset_polling]] - script = cylc workflow-state $CYLC_WORKFLOW_ID --task=foo -p 20100101T0000Z --offset=P1D + script = cylc workflow-state ${CYLC_WORKFLOW_ID}//20100102T0000Z/foo --offset=P1D [[offset_polling2]] - script = cylc workflow-state $CYLC_WORKFLOW_ID --task=foo -p 20100101T0000Z --offset=-P1D + script = cylc workflow-state ${CYLC_WORKFLOW_ID}//20100102T0000Z/foo --offset=-P1D diff --git a/tests/functional/workflow-state/message/flow.cylc b/tests/functional/workflow-state/output/flow.cylc similarity index 100% rename from tests/functional/workflow-state/message/flow.cylc rename to tests/functional/workflow-state/output/flow.cylc diff --git a/tests/functional/workflow-state/message/reference.log b/tests/functional/workflow-state/output/reference.log similarity index 100% rename from tests/functional/workflow-state/message/reference.log rename to tests/functional/workflow-state/output/reference.log diff --git a/tests/functional/workflow-state/polling/flow.cylc b/tests/functional/workflow-state/polling/flow.cylc index 82dccc9bc07..23f4891569b 100644 --- a/tests/functional/workflow-state/polling/flow.cylc +++ b/tests/functional/workflow-state/polling/flow.cylc @@ -1,5 +1,8 @@ #!jinja2 +{# e.g. 
set OUTPUT = ":x" #} +{% set OUTPUT = OUTPUT | default("") %} + [meta] title = "polls for success and failure tasks in another workflow" [scheduler] @@ -8,7 +11,7 @@ [[graph]] R1 = """ l-good<{{UPSTREAM}}::good-stuff> & lbad<{{UPSTREAM}}::bad:fail> - l-mess<{{UPSTREAM}}::messenger> => done + l-mess<{{UPSTREAM}}::messenger{{OUTPUT}}> => done """ [runtime] [[l-good,lbad]] diff --git a/tests/functional/workflow-state/template_ref/flow.cylc b/tests/functional/workflow-state/template_ref/flow.cylc deleted file mode 100644 index a01c722c1dc..00000000000 --- a/tests/functional/workflow-state/template_ref/flow.cylc +++ /dev/null @@ -1,13 +0,0 @@ -[scheduler] - UTC mode = True - cycle point format = %Y - -[scheduling] - initial cycle point = 2010 - final cycle point = 2011 - [[graph]] - P1Y = foo - -[runtime] - [[foo]] - script = true diff --git a/tests/functional/workflow-state/template_ref/reference.log b/tests/functional/workflow-state/template_ref/reference.log deleted file mode 100644 index 97101910d54..00000000000 --- a/tests/functional/workflow-state/template_ref/reference.log +++ /dev/null @@ -1,4 +0,0 @@ -Initial point: 2010 -Final point: 2011 -2010/foo -triggered off [] -2011/foo -triggered off [] diff --git a/tests/functional/xtriggers/03-sequence.t b/tests/functional/xtriggers/03-sequence.t index 1bb24d521a9..d8abdb2906b 100644 --- a/tests/functional/xtriggers/03-sequence.t +++ b/tests/functional/xtriggers/03-sequence.t @@ -60,4 +60,3 @@ __END__ cylc stop --now --max-polls=10 --interval=2 "${WORKFLOW_NAME}" purge -exit diff --git a/tests/functional/xtriggers/04-sequential.t b/tests/functional/xtriggers/04-sequential.t index 211aa47277f..f3837590b9b 100644 --- a/tests/functional/xtriggers/04-sequential.t +++ b/tests/functional/xtriggers/04-sequential.t @@ -35,11 +35,7 @@ init_workflow "${TEST_NAME_BASE}" << '__FLOW_CONFIG__' clock_1 = wall_clock(offset=P2Y, sequential=False) clock_2 = wall_clock() up_1 = workflow_state(\ - workflow=%(workflow)s, \ - task=b, \ - 
point=%(point)s, \ - offset=-P1Y, \ - sequential=False \ + workflow_task_id=%(workflow)s//%(point)s/b:succeeded, offset=-P1Y, sequential=False \ ):PT1S [[graph]] R1 = """ @@ -65,8 +61,7 @@ cylc stop --max-polls=10 --interval=2 "${WORKFLOW_NAME}" cylc play "${WORKFLOW_NAME}" cylc show "${WORKFLOW_NAME}//3001/a" | grep -E 'state: ' > 3001.a.log -cylc show "${WORKFLOW_NAME}//3002/a" 2>&1 >/dev/null \ - | grep -E 'No matching' > 3002.a.log +cylc show "${WORKFLOW_NAME}//3002/a" 2>&1 >/dev/null | grep -E 'No matching' > 3002.a.log # 3001/a should be spawned at both 3000/3001. cmp_ok 3001.a.log - <<__END__ @@ -81,9 +76,10 @@ cylc reload "${WORKFLOW_NAME}" cylc remove "${WORKFLOW_NAME}//3001/b" +poll_grep_workflow_log 'Command "remove_tasks" actioned.' + cylc show "${WORKFLOW_NAME}//3002/b" | grep -E 'state: ' > 3002.b.log -cylc show "${WORKFLOW_NAME}//3003/b" 2>&1 >/dev/null \ - | grep -E 'No matching' > 3003.b.log +cylc show "${WORKFLOW_NAME}//3003/b" 2>&1 >/dev/null | grep -E 'No matching' > 3003.b.log # 3002/b should be only at 3002. 
cmp_ok 3002.b.log - <<__END__ @@ -104,7 +100,6 @@ cmp_ok 3005.c.log - <<__END__ state: waiting __END__ - cylc stop --now --max-polls=10 --interval=2 "${WORKFLOW_NAME}" + purge -exit diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index edfe56e2a1f..518ef40f018 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -33,7 +33,7 @@ install as cylc_install, get_option_parser as install_gop ) -from cylc.flow.util import serialise +from cylc.flow.util import serialise_set from cylc.flow.wallclock import get_current_time_string from cylc.flow.workflow_files import infer_latest_run_from_id from cylc.flow.workflow_status import StopMode @@ -545,7 +545,7 @@ def _submit_task_jobs(*args, **kwargs): deps = tuple(sorted(itask.state.get_resolved_dependencies())) if flow_nums: triggers.add( - (itask.identity, serialise(itask.flow_nums), deps or None) + (itask.identity, serialise_set(itask.flow_nums), deps or None) ) else: triggers.add((itask.identity, deps or None)) @@ -558,8 +558,12 @@ def _submit_task_jobs(*args, **kwargs): return _reflog -@pytest.fixture -def complete(): +async def _complete( + schd, + *tokens_list: Union[Tokens, str], + stop_mode=StopMode.AUTO, + timeout: int = 60, +) -> None: """Wait for the workflow, or tasks within it to complete. Args: @@ -584,65 +588,67 @@ def complete(): async_timeout (handles shutdown logic more cleanly). 
""" - async def _complete( - schd, - *tokens_list: Union[Tokens, str], - stop_mode=StopMode.AUTO, - timeout: int = 60, - ) -> None: - start_time = time() - - _tokens_list: List[Tokens] = [] - for tokens in tokens_list: - if isinstance(tokens, str): - tokens = Tokens(tokens, relative=True) - _tokens_list.append(tokens.task) - - # capture task completion - remove_if_complete = schd.pool.remove_if_complete - - def _remove_if_complete(itask, output=None): - nonlocal _tokens_list - ret = remove_if_complete(itask) - if ret and itask.tokens.task in _tokens_list: - _tokens_list.remove(itask.tokens.task) - return ret - - schd.pool.remove_if_complete = _remove_if_complete - - # capture workflow shutdown - set_stop = schd._set_stop - has_shutdown = False - - def _set_stop(mode=None): - nonlocal has_shutdown, stop_mode - if mode == stop_mode: - has_shutdown = True - return set_stop(mode) - else: - set_stop(mode) - raise Exception(f'Workflow bailed with stop mode = {mode}') - - schd._set_stop = _set_stop - - # determine the completion condition - if _tokens_list: - condition = lambda: bool(_tokens_list) + start_time = time() + + _tokens_list: List[Tokens] = [] + for tokens in tokens_list: + if isinstance(tokens, str): + tokens = Tokens(tokens, relative=True) + _tokens_list.append(tokens.task) + + # capture task completion + remove_if_complete = schd.pool.remove_if_complete + + def _remove_if_complete(itask, output=None): + nonlocal _tokens_list + ret = remove_if_complete(itask) + if ret and itask.tokens.task in _tokens_list: + _tokens_list.remove(itask.tokens.task) + return ret + + schd.pool.remove_if_complete = _remove_if_complete + + # capture workflow shutdown + set_stop = schd._set_stop + has_shutdown = False + + def _set_stop(mode=None): + nonlocal has_shutdown, stop_mode + if mode == stop_mode: + has_shutdown = True + return set_stop(mode) else: - condition = lambda: bool(not has_shutdown) + set_stop(mode) + raise Exception(f'Workflow bailed with stop mode = {mode}') + + 
schd._set_stop = _set_stop + + # determine the completion condition + if _tokens_list: + condition = lambda: bool(_tokens_list) + else: + condition = lambda: bool(not has_shutdown) + + # wait for the condition to be met + while condition(): + # allow the main loop to advance + await asyncio.sleep(0) + if (time() - start_time) > timeout: + raise Exception( + f'Timeout waiting for {", ".join(map(str, _tokens_list))}' + ) + + # restore regular shutdown logic + schd._set_stop = set_stop + - # wait for the condition to be met - while condition(): - # allow the main loop to advance - await asyncio.sleep(0) - if (time() - start_time) > timeout: - raise Exception( - f'Timeout waiting for {", ".join(map(str, _tokens_list))}' - ) +@pytest.fixture +def complete(): + return _complete - # restore regular shutdown logic - schd._set_stop = set_stop +@pytest.fixture(scope='module') +def mod_complete(): return _complete diff --git a/tests/integration/scripts/test_validate_integration.py b/tests/integration/scripts/test_validate_integration.py index bd94a9d60cd..dcf697aac36 100644 --- a/tests/integration/scripts/test_validate_integration.py +++ b/tests/integration/scripts/test_validate_integration.py @@ -105,7 +105,7 @@ def test_validate_simple_graph(flow, validate, caplog): }) validate(id_) expect = ( - 'deprecated graph items were automatically upgraded' + 'graph items were automatically upgraded' ' in "workflow definition":' '\n * (8.0.0) [scheduling][dependencies]graph -> [scheduling][graph]R1' ) @@ -205,7 +205,7 @@ def test_graph_upgrade_msg_graph_equals2(flow, validate, caplog): }) validate(id_) expect = ( - 'deprecated graph items were automatically upgraded in' + 'graph items were automatically upgraded in' ' "workflow definition":' '\n * (8.0.0) [scheduling][dependencies][X]graph' ' -> [scheduling][graph]X - for X in:' diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index 0a186aa41bd..6e7454f1293 100644 --- a/tests/integration/test_config.py 
+++ b/tests/integration/test_config.py @@ -357,7 +357,7 @@ def test_xtrig_validation_wall_clock( } }) with pytest.raises(WorkflowConfigError, match=( - r'\[@myxt\] wall_clock\(offset=PT7MH\) validation failed: ' + r'\[@myxt\] wall_clock\(offset=PT7MH\)\n' r'Invalid offset: PT7MH' )): validate(id_) @@ -392,7 +392,7 @@ def test_xtrig_validation_echo( }) with pytest.raises( WorkflowConfigError, - match=r'echo.* Requires \'succeed=True/False\' arg' + match=r'Requires \'succeed=True/False\' arg' ): validate(id_) @@ -461,12 +461,12 @@ def kustom_validate(args): @pytest.mark.parametrize('xtrig_call, expected_msg', [ pytest.param( 'xrandom()', - r"xrandom.* missing a required argument: 'percent'", + r"missing a required argument: 'percent'", id="missing-arg" ), pytest.param( 'wall_clock(alan_grant=1)', - r"wall_clock.* unexpected keyword argument 'alan_grant'", + r"unexpected keyword argument 'alan_grant'", id="unexpected-arg" ), ]) diff --git a/tests/integration/test_dbstatecheck.py b/tests/integration/test_dbstatecheck.py new file mode 100644 index 00000000000..a6da4348ffb --- /dev/null +++ b/tests/integration/test_dbstatecheck.py @@ -0,0 +1,139 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Tests for the backend method of workflow_state""" + + +from asyncio import sleep +import pytest +from textwrap import dedent +from typing import TYPE_CHECKING + +from cylc.flow.dbstatecheck import CylcWorkflowDBChecker as Checker + + +if TYPE_CHECKING: + from cylc.flow.dbstatecheck import CylcWorkflowDBChecker + + +@pytest.fixture(scope='module') +async def checker( + mod_flow, mod_scheduler, mod_run, mod_complete +) -> 'CylcWorkflowDBChecker': + """Make a real world database. + + We could just write the database manually but this is a better + test of the overall working of the function under test. + """ + wid = mod_flow({ + 'scheduling': { + 'graph': {'P1Y': dedent(''' + good:succeeded + bad:failed? + output:custom_output + ''')}, + 'initial cycle point': '1000', + 'final cycle point': '1001' + }, + 'runtime': { + 'bad': {'simulation': {'fail cycle points': '1000'}}, + 'output': {'outputs': {'trigger': 'message'}} + } + }) + schd = mod_scheduler(wid, paused_start=False) + async with mod_run(schd): + await mod_complete(schd) + schd.pool.force_trigger_tasks(['1000/good'], [2]) + # Allow a cycle of the main loop to pass so that flow 2 can be + # added to db + await sleep(1) + yield Checker( + 'somestring', 'utterbunkum', + schd.workflow_db_mgr.pub_path + ) + + +def test_basic(checker): + """Pass no args, get unfiltered output""" + result = checker.workflow_state_query() + expect = [ + ['bad', '10000101T0000Z', 'failed'], + ['bad', '10010101T0000Z', 'succeeded'], + ['good', '10000101T0000Z', 'succeeded'], + ['good', '10010101T0000Z', 'succeeded'], + ['output', '10000101T0000Z', 'succeeded'], + ['output', '10010101T0000Z', 'succeeded'], + ['good', '10000101T0000Z', 'waiting', '(flows=2)'], + ] + assert result == expect + + +def test_task(checker): + """Filter by task name""" + result = checker.workflow_state_query(task='bad') + assert result == [ + ['bad', '10000101T0000Z', 'failed'], + ['bad', '10010101T0000Z', 'succeeded'] + ] + + +def test_point(checker): + 
"""Filter by point""" + result = checker.workflow_state_query(cycle='10000101T0000Z') + assert result == [ + ['bad', '10000101T0000Z', 'failed'], + ['good', '10000101T0000Z', 'succeeded'], + ['output', '10000101T0000Z', 'succeeded'], + ['good', '10000101T0000Z', 'waiting', '(flows=2)'], + ] + + +def test_status(checker): + """Filter by status""" + result = checker.workflow_state_query(selector='failed') + expect = [ + ['bad', '10000101T0000Z', 'failed'], + ] + assert result == expect + + +def test_output(checker): + """Filter by flow number""" + result = checker.workflow_state_query(selector='message', is_message=True) + expect = [ + [ + 'output', + '10000101T0000Z', + "{'submitted': 'submitted', 'started': 'started', 'succeeded': " + "'succeeded', 'trigger': 'message'}", + ], + [ + 'output', + '10010101T0000Z', + "{'submitted': 'submitted', 'started': 'started', 'succeeded': " + "'succeeded', 'trigger': 'message'}", + ], + ] + assert result == expect + + +def test_flownum(checker): + """Pass no args, get unfiltered output""" + result = checker.workflow_state_query(flow_num=2) + expect = [ + ['good', '10000101T0000Z', 'waiting', '(flows=2)'], + ] + assert result == expect diff --git a/tests/integration/test_sequential_xtriggers.py b/tests/integration/test_sequential_xtriggers.py index cbe0051d084..8d3b6129044 100644 --- a/tests/integration/test_sequential_xtriggers.py +++ b/tests/integration/test_sequential_xtriggers.py @@ -190,7 +190,7 @@ def xtrig2(x, sequential='True'): with pytest.raises(XtriggerConfigError) as excinfo: validate(wid) assert ( - "reserved argument 'sequential' that has no boolean default" + "reserved argument 'sequential' with no boolean default" ) in str(excinfo.value) diff --git a/tests/integration/test_xtrigger_mgr.py b/tests/integration/test_xtrigger_mgr.py index 612049163cc..3bf425650c4 100644 --- a/tests/integration/test_xtrigger_mgr.py +++ b/tests/integration/test_xtrigger_mgr.py @@ -188,3 +188,46 @@ def mytrig(*args, **kwargs): # check 
the DB to ensure no additional entries have been created assert db_select(schd, True, 'xtriggers') == db_xtriggers + + +async def test_error_in_xtrigger(flow, start, scheduler): + """Failure in an xtrigger is handled nicely. + """ + id_ = flow({ + 'scheduler': { + 'allow implicit tasks': 'True' + }, + 'scheduling': { + 'xtriggers': { + 'mytrig': 'mytrig()' + }, + 'graph': { + 'R1': '@mytrig => foo' + }, + } + }) + + # add a custom xtrigger to the workflow + run_dir = Path(get_workflow_run_dir(id_)) + xtrig_dir = run_dir / 'lib/python' + xtrig_dir.mkdir(parents=True) + (xtrig_dir / 'mytrig.py').write_text(dedent(''' + def mytrig(*args, **kwargs): + raise Exception('This Xtrigger is broken') + ''')) + + schd = scheduler(id_) + async with start(schd) as log: + foo = schd.pool.get_tasks()[0] + schd.xtrigger_mgr.call_xtriggers_async(foo) + for _ in range(50): + await asyncio.sleep(0.1) + schd.proc_pool.process() + if len(schd.proc_pool.runnings) == 0: + break + else: + raise Exception('Process pool did not clear') + + error = log.messages[-1].split('\n') + assert error[-2] == 'Exception: This Xtrigger is broken' + assert error[0] == 'ERROR in xtrigger mytrig()' diff --git a/tests/unit/cycling/test_util.py b/tests/unit/cycling/test_util.py index 2cec1278cfe..fce316c6de5 100644 --- a/tests/unit/cycling/test_util.py +++ b/tests/unit/cycling/test_util.py @@ -24,10 +24,12 @@ def test_add_offset(): """Test socket start.""" orig_point = '20200202T0000Z' plus_offset = '+PT02H02M' - print(add_offset(orig_point, plus_offset)) assert str(add_offset(orig_point, plus_offset)) == '20200202T0202Z' minus_offset = '-P1MT22H59M' assert str(add_offset(orig_point, minus_offset)) == '20200101T0101Z' + assert str( + add_offset(orig_point, minus_offset, dmp_fmt="CCYY-MM-DDThh:mmZ") + ) == '2020-01-01T01:01Z' bad_offset = '+foo' - with pytest.raises(ValueError, match=r'ERROR, bad offset format') as exc: - bad_point = add_offset(orig_point, bad_offset) + with pytest.raises(ValueError, 
match=r'ERROR, bad offset format'): + add_offset(orig_point, bad_offset) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 653c1c11f8b..9cdcee89003 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from copy import deepcopy import os import sys from optparse import Values @@ -23,7 +22,6 @@ import pytest import logging from types import SimpleNamespace -from unittest.mock import Mock from contextlib import suppress from cylc.flow import CYLC_LOG @@ -40,10 +38,8 @@ from cylc.flow.parsec.exceptions import Jinja2Error, EmPyError from cylc.flow.scheduler_cli import RunOptions from cylc.flow.scripts.validate import ValidateOptions -from cylc.flow.simulation import configure_sim_modes from cylc.flow.workflow_files import WorkflowFiles from cylc.flow.wallclock import get_utc_mode, set_utc_mode -from cylc.flow.xtrigger_mgr import XtriggerManager from cylc.flow.task_outputs import ( TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_SUCCEEDED, @@ -86,8 +82,7 @@ class TestWorkflowConfig: """Test class for the Cylc WorkflowConfig object.""" def test_xfunction_imports( - self, mock_glbl_cfg: Fixture, tmp_path: Path, - xtrigger_mgr: XtriggerManager): + self, mock_glbl_cfg: Fixture, tmp_path: Path): """Test for a workflow configuration with valid xtriggers""" mock_glbl_cfg( 'cylc.flow.platforms.glbl_cfg', @@ -115,10 +110,9 @@ def test_xfunction_imports( """ flow_file.write_text(flow_config) workflow_config = WorkflowConfig( - workflow="name_a_tree", fpath=flow_file, options=SimpleNamespace(), - xtrigger_mgr=xtrigger_mgr + workflow="name_a_tree", fpath=flow_file, options=SimpleNamespace() ) - assert 'tree' in workflow_config.xtrigger_mgr.functx_map + assert 'tree' in workflow_config.xtrigger_collator.functx_map def test_xfunction_import_error(self, mock_glbl_cfg, tmp_path): """Test for error when a xtrigger function cannot be 
imported.""" @@ -151,7 +145,7 @@ def test_xfunction_import_error(self, mock_glbl_cfg, tmp_path): fpath=flow_file, options=SimpleNamespace() ) - assert "not found" in str(excinfo.value) + assert "No module named 'piranha'" in str(excinfo.value) def test_xfunction_attribute_error(self, mock_glbl_cfg, tmp_path): """Test for error when a xtrigger function cannot be imported.""" @@ -181,7 +175,7 @@ def test_xfunction_attribute_error(self, mock_glbl_cfg, tmp_path): with pytest.raises(XtriggerConfigError) as excinfo: WorkflowConfig(workflow="capybara_workflow", fpath=flow_file, options=SimpleNamespace()) - assert "not found" in str(excinfo.value) + assert "module 'capybara' has no attribute 'capybara'" in str(excinfo.value) def test_xfunction_not_callable(self, mock_glbl_cfg, tmp_path): """Test for error when a xtrigger function is not callable.""" diff --git a/tests/unit/test_db_compat.py b/tests/unit/test_db_compat.py index 1cb31173371..5393e85e67f 100644 --- a/tests/unit/test_db_compat.py +++ b/tests/unit/test_db_compat.py @@ -134,9 +134,9 @@ def test_cylc_7_db_wflow_params_table(_setup_db): with pytest.raises( sqlite3.OperationalError, match="no such table: workflow_params" ): - checker.get_remote_point_format() + checker._get_db_point_format() - assert checker.get_remote_point_format_compat() == ptformat + assert checker.db_point_fmt == ptformat def test_pre_830_task_action_timers(_setup_db): diff --git a/tests/unit/test_graph_parser.py b/tests/unit/test_graph_parser.py index 84a8e4611fd..75a0bf95a83 100644 --- a/tests/unit/test_graph_parser.py +++ b/tests/unit/test_graph_parser.py @@ -314,8 +314,9 @@ def test_inter_workflow_dependence_simple(): 'a': ( 'WORKFLOW', 'TASK', 'failed', '' ), + # Default to "succeeded" is done in config module. 
'c': ( - 'WORKFLOW', 'TASK', 'succeeded', '' + 'WORKFLOW', 'TASK', None, '' ) } ) diff --git a/tests/unit/test_util.py b/tests/unit/test_util.py index 6da9b28ca24..1f8c98897a4 100644 --- a/tests/unit/test_util.py +++ b/tests/unit/test_util.py @@ -14,10 +14,10 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from cylc.flow.util import deserialise +from cylc.flow.util import deserialise_set -def test_deserialise(): - actual = deserialise('["2", "3"]') +def test_deserialise_set(): + actual = deserialise_set('["2", "3"]') expected = {'2', '3'} assert actual == expected diff --git a/tests/unit/test_xtrigger_mgr.py b/tests/unit/test_xtrigger_mgr.py index 5192a7d3dd8..276fd354a95 100644 --- a/tests/unit/test_xtrigger_mgr.py +++ b/tests/unit/test_xtrigger_mgr.py @@ -24,13 +24,7 @@ from cylc.flow.subprocctx import SubFuncContext from cylc.flow.task_proxy import TaskProxy from cylc.flow.taskdef import TaskDef -from cylc.flow.xtrigger_mgr import RE_STR_TMPL, XtriggerManager - - -def test_constructor(xtrigger_mgr): - """Test creating an XtriggerManager, and its initial state.""" - # the dict with normal xtriggers starts empty - assert not xtrigger_mgr.functx_map +from cylc.flow.xtrigger_mgr import RE_STR_TMPL, XtriggerCollator def test_extract_templates(): @@ -44,59 +38,82 @@ def test_extract_templates(): ) -def test_add_xtrigger(xtrigger_mgr): - """Test for adding an xtrigger.""" +def test_add_missing_func(): + """Test for adding an xtrigger that can't be found.""" + xtriggers = XtriggerCollator() xtrig = SubFuncContext( - label="echo", - func_name="echo", + label="fooble", + func_name="fooble123", # no such module func_args=["name", "age"], func_kwargs={"location": "soweto"} ) - xtrigger_mgr.add_trig("xtrig", xtrig, 'fdir') - assert xtrig == xtrigger_mgr.functx_map["xtrig"] + with pytest.raises( + XtriggerConfigError, + match=r"\[@xtrig\] fooble123\(.*\)\nNo module named 'fooble123'" + ): + 
xtriggers.add_trig("xtrig", xtrig, 'fdir') -def test_add_xtrigger_with_params(xtrigger_mgr): - """Test for adding an xtrigger.""" +def test_add_xtrigger(): + """Test for adding and validating an xtrigger.""" + xtriggers = XtriggerCollator() xtrig = SubFuncContext( label="echo", func_name="echo", - func_args=["name", "%(point)s"], - func_kwargs={"%(location)s": "soweto"} # no problem with the key! + func_args=["name", "age"], + func_kwargs={"location": "soweto"} ) - xtrigger_mgr.add_trig("xtrig", xtrig, 'fdir') - assert xtrig == xtrigger_mgr.functx_map["xtrig"] + with pytest.raises( + XtriggerConfigError, + match="Requires 'succeed=True/False' arg" + ): + xtriggers.add_trig("xtrig", xtrig, 'fdir') + xtrig = SubFuncContext( + label="echo", + func_name="echo", + func_args=["name", "age"], + func_kwargs={"location": "soweto", "succeed": True} + ) + xtriggers.add_trig("xtrig", xtrig, 'fdir') + assert xtrig == xtriggers.functx_map["xtrig"] -def test_check_xtrigger_with_unknown_params(): - """Test for adding an xtrigger with an unknown parameter. - The XTriggerManager contains a list of specific parameters that are - available in the function template. +def test_add_xtrigger_with_template_good(): + """Test adding an xtrigger with a valid string template arg value.""" + xtriggers = XtriggerCollator() + xtrig = SubFuncContext( + label="echo", + func_name="echo", + func_args=["name", "%(point)s"], # valid template + func_kwargs={"location": "soweto", "succeed": True} + ) + xtriggers.add_trig("xtrig", xtrig, 'fdir') + assert xtrig == xtriggers.functx_map["xtrig"] - Values that are not strings raise a TypeError during regex matching, but - are ignored, so we should not have any issue with TypeError. - If a value in the format %(foo)s appears in the parameters, and 'foo' - is not in this list of parameters, then a ValueError is expected. 
- """ +def test_add_xtrigger_with_template_bad(): + """Test adding an xtrigger with an invalid string template arg value.""" + xtriggers = XtriggerCollator() xtrig = SubFuncContext( label="echo", func_name="echo", - func_args=[1, "name", "%(what_is_this)s"], - func_kwargs={"succeed": True} + func_args=["name", "%(point)s"], + # invalid template: + func_kwargs={"location": "%(what_is_this)s", "succeed": True} ) with pytest.raises( XtriggerConfigError, match="Illegal template in xtrigger: what_is_this" ): - XtriggerManager.check_xtrigger("xtrig", xtrig, 'fdir') + xtriggers.add_trig("xtrig", xtrig, 'fdir') -def test_check_xtrigger_with_deprecated_params( +def test_add_xtrigger_with_deprecated_params( caplog: pytest.LogCaptureFixture ): """It should flag deprecated template variables.""" + xtriggers = XtriggerCollator() xtrig = SubFuncContext( label="echo", func_name="echo", @@ -104,7 +121,7 @@ def test_check_xtrigger_with_deprecated_params( func_kwargs={"succeed": True} ) caplog.set_level(logging.WARNING, CYLC_LOG) - XtriggerManager.check_xtrigger("xtrig", xtrig, 'fdir') + xtriggers.add_trig("xtrig", xtrig, 'fdir') assert caplog.messages == [ 'Xtrigger "xtrig" uses deprecated template variables: suite_name' ] @@ -135,6 +152,7 @@ def test_housekeeping_nothing_satisfied(xtrigger_mgr): are kept.""" row = "get_name", "{\"name\": \"function\"}" # now XtriggerManager#sat_xtrigger will contain the get_name xtrigger + xtrigger_mgr.add_xtriggers(XtriggerCollator()) xtrigger_mgr.load_xtrigger_for_restart(row_idx=0, row=row) assert xtrigger_mgr.sat_xtrig xtrigger_mgr.housekeep([]) @@ -144,13 +162,18 @@ def test_housekeeping_nothing_satisfied(xtrigger_mgr): def test_housekeeping_with_xtrigger_satisfied(xtrigger_mgr): """The housekeeping method makes sure only satisfied xtrigger function are kept.""" + + xtriggers = XtriggerCollator() + xtrig = SubFuncContext( label="get_name", - func_name="get_name", + func_name="echo", func_args=[], - func_kwargs={} + func_kwargs={"succeed": 
True} ) - xtrigger_mgr.add_trig("get_name", xtrig, 'fdir') + xtriggers.add_trig("get_name", xtrig, 'fdir') + xtrigger_mgr.add_xtriggers(xtriggers) + xtrig.out = "[\"True\", {\"name\": \"Yossarian\"}]" tdef = TaskDef( name="foo", @@ -159,15 +182,19 @@ def test_housekeeping_with_xtrigger_satisfied(xtrigger_mgr): start_point=1, initial_point=1, ) + init() sequence = ISO8601Sequence('P1D', '2019') tdef.xtrig_labels[sequence] = ["get_name"] start_point = ISO8601Point('2019') itask = TaskProxy(Tokens('~user/workflow'), tdef, start_point) # pretend the function has been activated + xtrigger_mgr.active.append(xtrig.get_signature()) + xtrigger_mgr.callback(xtrig) assert xtrigger_mgr.sat_xtrig + xtrigger_mgr.housekeep([itask]) # here we still have the same number as before assert xtrigger_mgr.sat_xtrig @@ -175,25 +202,32 @@ def test_housekeeping_with_xtrigger_satisfied(xtrigger_mgr): def test__call_xtriggers_async(xtrigger_mgr): """Test _call_xtriggers_async""" + + xtriggers = XtriggerCollator() + # the echo1 xtrig (not satisfied) echo1_xtrig = SubFuncContext( label="echo1", - func_name="echo1", + func_name="echo", func_args=[], - func_kwargs={} + func_kwargs={"succeed": False} ) echo1_xtrig.out = "[\"True\", {\"name\": \"herminia\"}]" - xtrigger_mgr.add_trig("echo1", echo1_xtrig, "fdir") + xtriggers.add_trig("echo1", echo1_xtrig, "fdir") + # the echo2 xtrig (satisfied through callback later) echo2_xtrig = SubFuncContext( label="echo2", - func_name="echo2", + func_name="echo", func_args=[], - func_kwargs={} + func_kwargs={"succeed": True} ) echo2_xtrig.out = "[\"True\", {\"name\": \"herminia\"}]" - xtrigger_mgr.add_trig("echo2", echo2_xtrig, "fdir") + xtriggers.add_trig("echo2", echo2_xtrig, "fdir") + + xtrigger_mgr.add_xtriggers(xtriggers) + # create a task tdef = TaskDef( name="foo", diff --git a/tests/unit/xtriggers/test_workflow_state.py b/tests/unit/xtriggers/test_workflow_state.py index bb5228984c9..5420a4fd909 100644 --- a/tests/unit/xtriggers/test_workflow_state.py 
+++ b/tests/unit/xtriggers/test_workflow_state.py @@ -15,35 +15,37 @@ # along with this program. If not, see . from pathlib import Path -import pytest import sqlite3 -from typing import Callable -from unittest.mock import Mock +from typing import Any, Callable from shutil import copytree, rmtree -from cylc.flow.exceptions import InputError -from cylc.flow.pathutil import get_cylc_run_dir +import pytest + +from cylc.flow.dbstatecheck import output_fallback_msg +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.rundb import CylcWorkflowDAO from cylc.flow.workflow_files import WorkflowFiles -from cylc.flow.xtriggers.workflow_state import workflow_state -from ..conftest import MonkeyMock +from cylc.flow.xtriggers.workflow_state import ( + _workflow_state_backcompat, + workflow_state, + validate, +) +from cylc.flow.xtriggers.suite_state import suite_state + + +def test_inferred_run(tmp_run_dir: 'Callable', capsys: pytest.CaptureFixture): + """Test that the workflow_state xtrigger infers the run number. + Method: the faked run-dir has no DB to connect to, but the WorkflowPoller + prints inferred ID to stderr if the run-dir exists. -def test_inferred_run(tmp_run_dir: Callable, monkeymock: MonkeyMock): - """Test that the workflow_state xtrigger infers the run number""" + """ id_ = 'isildur' expected_workflow_id = f'{id_}/run1' cylc_run_dir = str(tmp_run_dir()) tmp_run_dir(expected_workflow_id, installed=True, named=True) - mock_db_checker = monkeymock( - 'cylc.flow.xtriggers.workflow_state.CylcWorkflowDBChecker', - return_value=Mock( - get_remote_point_format=lambda: 'CCYY', - ) - ) - - _, results = workflow_state(id_, task='precious', point='3000') - mock_db_checker.assert_called_once_with(cylc_run_dir, expected_workflow_id) - assert results['workflow'] == expected_workflow_id + workflow_state(id_ + '//3000/precious') + assert expected_workflow_id in capsys.readouterr().err # Now test we can see workflows in alternate cylc-run directories # e.g. 
for `cylc workflow-state` or xtriggers targetting another user. @@ -54,19 +56,15 @@ def test_inferred_run(tmp_run_dir: Callable, monkeymock: MonkeyMock): rmtree(cylc_run_dir) # It can no longer parse IDs in the original cylc-run location. - with pytest.raises(InputError): - _, results = workflow_state(id_, task='precious', point='3000') + workflow_state(id_) + assert expected_workflow_id not in capsys.readouterr().err # But it can via an explicit alternate run directory. - mock_db_checker.reset_mock() - _, results = workflow_state( - id_, task='precious', point='3000', cylc_run_dir=alt_cylc_run_dir) - mock_db_checker.assert_called_once_with( - alt_cylc_run_dir, expected_workflow_id) - assert results['workflow'] == expected_workflow_id + workflow_state(id_, alt_cylc_run_dir=alt_cylc_run_dir) + assert expected_workflow_id in capsys.readouterr().err -def test_back_compat(tmp_run_dir, caplog): +def test_c7_db_back_compat(tmp_run_dir: 'Callable'): """Test workflow_state xtrigger backwards compatibility with Cylc 7 database.""" id_ = 'celebrimbor' @@ -88,6 +86,11 @@ def test_back_compat(tmp_run_dir, caplog): submit_num INTEGER, status TEXT, PRIMARY KEY(name, cycle) ); """) + conn.execute(r""" + CREATE TABLE task_outputs( + cycle TEXT, name TEXT, outputs TEXT, PRIMARY KEY(cycle, name) + ); + """) conn.executemany( r'INSERT INTO "suite_params" VALUES(?,?);', [('cylc_version', '7.8.12'), @@ -95,9 +98,14 @@ def test_back_compat(tmp_run_dir, caplog): ('cycle_point_tz', 'Z')] ) conn.execute(r""" - INSERT INTO "task_states" VALUES( - 'mithril','2012','2023-01-30T18:19:15Z','2023-01-30T18:19:15Z', - 0,'succeeded' + INSERT INTO "task_states" VALUES( + 'mithril','2012','2023-01-30T18:19:15Z','2023-01-30T18:19:15Z', + 0,'succeeded' + ); + """) + conn.execute(r""" + INSERT INTO "task_outputs" VALUES( + '2012','mithril','{"frodo": "bag end"}' ); """) conn.commit() @@ -105,14 +113,182 @@ def test_back_compat(tmp_run_dir, caplog): conn.close() # Test workflow_state function - 
satisfied, _ = workflow_state(id_, task='mithril', point='2012') + satisfied, _ = workflow_state(f'{id_}//2012/mithril') + assert satisfied + satisfied, _ = workflow_state(f'{id_}//2012/mithril:succeeded') assert satisfied - satisfied, _ = workflow_state(id_, task='arkenstone', point='2012') + satisfied, _ = workflow_state( + f'{id_}//2012/mithril:frodo', is_trigger=True + ) + assert satisfied + satisfied, _ = workflow_state( + f'{id_}//2012/mithril:"bag end"', is_message=True + ) + assert satisfied + satisfied, _ = workflow_state(f'{id_}//2012/mithril:pippin') + assert not satisfied + satisfied, _ = workflow_state(id_ + '//2012/arkenstone') assert not satisfied # Test back-compat (old suite_state function) - from cylc.flow.xtriggers.suite_state import suite_state satisfied, _ = suite_state(suite=id_, task='mithril', point='2012') assert satisfied + satisfied, _ = suite_state( + suite=id_, task='mithril', point='2012', status='succeeded' + ) + assert satisfied + satisfied, _ = suite_state( + suite=id_, task='mithril', point='2012', message='bag end' + ) + assert satisfied satisfied, _ = suite_state(suite=id_, task='arkenstone', point='2012') assert not satisfied + + +def test_c8_db_back_compat( + tmp_run_dir: 'Callable', + capsys: pytest.CaptureFixture, +): + """Test workflow_state xtrigger backwards compatibility with Cylc < 8.3.0 + database.""" + id_ = 'nazgul' + run_dir: Path = tmp_run_dir(id_) + db_file = run_dir / 'log' / 'db' + db_file.parent.mkdir(exist_ok=True) + # Note: don't use CylcWorkflowDAO here as DB should be frozen + conn = sqlite3.connect(str(db_file)) + try: + conn.execute(r""" + CREATE TABLE workflow_params( + key TEXT, value TEXT, PRIMARY KEY(key) + ); + """) + conn.execute(r""" + CREATE TABLE task_states( + name TEXT, cycle TEXT, flow_nums TEXT, time_created TEXT, + time_updated TEXT, submit_num INTEGER, status TEXT, + flow_wait INTEGER, is_manual_submit INTEGER, + PRIMARY KEY(name, cycle, flow_nums) + ); + """) + conn.execute(r""" + CREATE 
TABLE task_outputs( + cycle TEXT, name TEXT, flow_nums TEXT, outputs TEXT, + PRIMARY KEY(cycle, name, flow_nums) + ); + """) + conn.executemany( + r'INSERT INTO "workflow_params" VALUES(?,?);', + [('cylc_version', '8.2.7'), + ('cycle_point_format', '%Y'), + ('cycle_point_tz', 'Z')] + ) + conn.execute(r""" + INSERT INTO "task_states" VALUES( + 'gimli','2012','[1]','2023-01-30T18:19:15Z', + '2023-01-30T18:19:15Z',1,'succeeded',0,0 + ); + """) + conn.execute(r""" + INSERT INTO "task_outputs" VALUES( + '2012','gimli','[1]', + '["submitted", "started", "succeeded", "axe"]' + ); + """) + conn.commit() + finally: + conn.close() + + gimli = f'{id_}//2012/gimli' + + satisfied, _ = workflow_state(gimli) + assert satisfied + satisfied, _ = workflow_state(f'{gimli}:succeeded') + assert satisfied + satisfied, _ = workflow_state(f'{gimli}:axe', is_message=True) + assert satisfied + _, err = capsys.readouterr() + assert not err + # Output label selector falls back to message + # (won't work if messsage != output label) + satisfied, _ = workflow_state(f'{gimli}:axe', is_trigger=True) + assert satisfied + _, err = capsys.readouterr() + assert output_fallback_msg in err + + +def test__workflow_state_backcompat(tmp_run_dir: 'Callable'): + """Test the _workflow_state_backcompat & suite_state functions on a + *current* Cylc database.""" + id_ = 'dune' + run_dir: Path = tmp_run_dir(id_) + db_file = run_dir / 'log' / 'db' + db_file.parent.mkdir(exist_ok=True) + with CylcWorkflowDAO(db_file, create_tables=True) as dao: + conn = dao.connect() + conn.executemany( + r'INSERT INTO "workflow_params" VALUES(?,?);', + [('cylc_version', '8.3.0'), + ('cycle_point_format', '%Y'), + ('cycle_point_tz', 'Z')] + ) + conn.execute(r""" + INSERT INTO "task_states" VALUES( + 'arrakis','2012','[1]','2023-01-30T18:19:15Z', + '2023-01-30T18:19:15Z',1,'succeeded',0,0 + ); + """) + conn.execute(r""" + INSERT INTO "task_outputs" VALUES( + '2012','arrakis','[1]', + '{"submitted": "submitted", "started": 
"started", "succeeded": "succeeded", "paul": "lisan al-gaib"}' + ); + """) + conn.commit() + + func: Any + for func in (_workflow_state_backcompat, suite_state): + satisfied, _ = func(id_, 'arrakis', '2012') + assert satisfied + satisfied, _ = func(id_, 'arrakis', '2012', status='succeeded') + assert satisfied + # Both output label and message work + satisfied, _ = func(id_, 'arrakis', '2012', message='paul') + assert satisfied + satisfied, _ = func(id_, 'arrakis', '2012', message='lisan al-gaib') + assert satisfied + + +def test_validate_ok(): + """Validate returns ok with valid args.""" + validate({ + 'workflow_task_id': 'foo//1/bar', + 'offset': 'PT1H', + 'flow_num': 44, + }) + + +@pytest.mark.parametrize( + 'id_', (('foo//1'),) +) +def test_validate_fail_bad_id(id_): + """Validation failure for bad id""" + with pytest.raises(WorkflowConfigError, match='Full ID needed'): + validate({ + 'workflow_task_id': id_, + 'offset': 'PT1H', + 'flow_num': 44, + }) + + +@pytest.mark.parametrize( + 'flow_num', ((4.25260), ('Belguim')) +) +def test_validate_fail_non_int_flow(flow_num): + """Validate failure for non integer flow numbers.""" + with pytest.raises(WorkflowConfigError, match='must be an integer'): + validate({ + 'workflow_task_id': 'foo//1/bar', + 'offset': 'PT1H', + 'flow_num': flow_num, + }) diff --git a/tox.ini b/tox.ini index 95222eeb859..d9954dbb7e8 100644 --- a/tox.ini +++ b/tox.ini @@ -14,9 +14,9 @@ ignore= per-file-ignores= ; TYPE_CHECKING block suggestions - tests/*: TC001 + tests/*: TC001, TC002, TC003 ; for clarity we don't merge 'with Conf():' context trees - tests/unit/parsec/*: SIM117 + tests/unit/parsec/*: SIM117, TC001, TC002, TC003 exclude= build,