cleared up test_runner

PatchOfScotland
2023-02-24 10:38:02 +01:00
parent b9063e1743
commit 486c92f6da
9 changed files with 1840 additions and 45 deletions

.gitignore
View File

@ -53,6 +53,14 @@ coverage.xml
tests/test_monitor_base
tests/test_job_queue_dir
tests/test_job_output
+tests/test_files
+tests/test_data
+tests/job_output
+tests/job_queue
+tests/Backup*
+# hdf5
+*.h5
# Translations
*.mo

View File

@ -18,7 +18,8 @@ from core.base_conductor import BaseConductor
from core.base_handler import BaseHandler
from core.base_monitor import BaseMonitor
from core.correctness.vars import DEBUG_WARNING, DEBUG_INFO, EVENT_TYPE, \
-    VALID_CHANNELS, META_FILE, DEFAULT_JOB_OUTPUT_DIR, DEFAULT_JOB_QUEUE_DIR
+    VALID_CHANNELS, META_FILE, DEFAULT_JOB_OUTPUT_DIR, DEFAULT_JOB_QUEUE_DIR, \
+    EVENT_PATH
from core.correctness.validation import check_type, valid_list, valid_dir_path
from functionality.debug import setup_debugging, print_debug
from functionality.file_io import make_dir, read_yaml
@ -50,7 +51,6 @@ class MeowRunner:
handlers and conductors according to what events and jobs they produce
or consume."""
self._is_valid_job_queue_dir(job_queue_dir)
self._is_valid_job_output_dir(job_output_dir)
@ -200,16 +200,17 @@ class MeowRunner:
"""Function for a given handler to handle a given event, without
crashing the runner in the event of a problem."""
print_debug(self._print_target, self.debug_level,
f"Starting handling for event: '{event[EVENT_TYPE]}'", DEBUG_INFO)
f"Starting handling for {event[EVENT_TYPE]} event: "
f"'{event[EVENT_PATH]}'", DEBUG_INFO)
try:
handler.handle(event)
print_debug(self._print_target, self.debug_level,
f"Completed handling for event: '{event[EVENT_TYPE]}'",
DEBUG_INFO)
f"Completed handling for {event[EVENT_TYPE]} event: "
f"'{event[EVENT_PATH]}'", DEBUG_INFO)
except Exception as e:
print_debug(self._print_target, self.debug_level,
"Something went wrong during handling for event "
f"'{event[EVENT_TYPE]}'. {e}", DEBUG_INFO)
f"Something went wrong during handling for {event[EVENT_TYPE]}"
f" event '{event[EVENT_PATH]}'. {e}", DEBUG_INFO)
def execute_job(self, conductor:BaseConductor, job_dir:str)->None:
"""Function for a given conductor to execute a given job, without

View File

@ -26,7 +26,7 @@ KEYWORD_DIR = "{DIR}"
KEYWORD_REL_DIR = "{REL_DIR}"
KEYWORD_FILENAME = "{FILENAME}"
KEYWORD_PREFIX = "{PREFIX}"
KEYWORD_BASE = "{VGRID}"
KEYWORD_BASE = "{BASE}"
KEYWORD_EXTENSION = "{EXTENSION}"
KEYWORD_JOB = "{JOB}"

View File

@ -27,7 +27,7 @@ from core.correctness.validation import check_type, valid_string, \
from core.correctness.vars import VALID_RECIPE_NAME_CHARS, \
VALID_VARIABLE_NAME_CHARS, FILE_EVENTS, FILE_CREATE_EVENT, \
FILE_MODIFY_EVENT, FILE_MOVED_EVENT, DEBUG_INFO, \
-    FILE_RETROACTIVE_EVENT, SHA256
+    FILE_RETROACTIVE_EVENT, SHA256, VALID_PATH_CHARS, FILE_CLOSED_EVENT
from functionality.debug import setup_debugging, print_debug
from functionality.hashing import get_file_hash
from functionality.meow import create_rule, create_watchdog_event
@ -37,7 +37,8 @@ _DEFAULT_MASK = [
FILE_CREATE_EVENT,
FILE_MODIFY_EVENT,
FILE_MOVED_EVENT,
-    FILE_RETROACTIVE_EVENT
+    FILE_RETROACTIVE_EVENT,
+    FILE_CLOSED_EVENT
]
class FileEventPattern(BasePattern):
@ -64,7 +65,7 @@ class FileEventPattern(BasePattern):
def _is_valid_triggering_path(self, triggering_path:str)->None:
"""Validation check for 'triggering_path' variable from main
constructor."""
-        valid_path(triggering_path)
+        valid_string(triggering_path, VALID_PATH_CHARS+'*', min_length=1)
if len(triggering_path) < 1:
raise ValueError (
f"triggiering path '{triggering_path}' is too short. "
@ -170,8 +171,9 @@ class WatchdogMonitor(BaseMonitor):
def match(self, event)->None:
"""Function to determine if a given event matches the current rules."""
src_path = event.src_path
event_type = "dir_"+ event.event_type if event.is_directory \
else "file_" + event.event_type
prepend = "dir_" if event.is_directory else "file_"
event_types = [prepend+i for i in event.event_type]
# Remove the base dir from the path as trigger paths are given relative
# to that
@ -185,7 +187,8 @@ class WatchdogMonitor(BaseMonitor):
for rule in self._rules.values():
# Skip events not within the event mask
-            if event_type not in rule.pattern.event_mask:
+            if not any(i in event_types for i in rule.pattern.event_mask):
continue
# Use regex to match event paths against rule paths
@ -205,8 +208,8 @@ class WatchdogMonitor(BaseMonitor):
get_file_hash(event.src_path, SHA256)
)
print_debug(self._print_target, self.debug_level,
f"Event at {src_path} of type {event_type} hit rule "
f"{rule.name}", DEBUG_INFO)
f"Event at {src_path} hit rule {rule.name}",
DEBUG_INFO)
# Send the event to the runner
self.to_runner.send(meow_event)
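
Events reaching match() now carry a set of raw watchdog event types, accumulated while the event settled (see the handler changes below), so a rule fires when any prefixed type overlaps its event mask. A tiny illustration with assumed data, not taken from the repo:

# an event that was created and then closed during the settle window
event_types_raw = {"created", "closed"}
event_types = ["file_" + t for t in event_types_raw]
event_mask = ["file_created", "file_modified", "file_retroactive", "file_closed"]
assert any(t in event_mask for t in event_types)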
@ -543,32 +546,63 @@ class WatchdogEventHandler(PatternMatchingEventHandler):
monitor. After each event we wait for '_settletime', to catch
subsequent events at the same location, so as to not swamp the system
with repeated events."""
self._recent_jobs_lock.acquire()
try:
if event.src_path in self._recent_jobs:
-                recent_timestamp = self._recent_jobs[event.src_path]
-                difference = event.time_stamp - recent_timestamp
-                # Discard the event if we already have a recent event at this
-                # same path. Update the most recent time, so we can hopefully
-                # wait till events have stopped happening
-                if difference <= self._settletime:
-                    self._recent_jobs[event.src_path] = \
-                        max(recent_timestamp, event.time_stamp)
+                if event.time_stamp > self._recent_jobs[event.src_path][0]:
+                    self._recent_jobs[event.src_path][0] = event.time_stamp
+                    self._recent_jobs[event.src_path][1].add(event.event_type)
+                else:
                    self._recent_jobs_lock.release()
                    return
-                else:
-                    self._recent_jobs[event.src_path] = event.time_stamp
            else:
-                self._recent_jobs[event.src_path] = event.time_stamp
+                self._recent_jobs[event.src_path] = \
+                    [event.time_stamp, {event.event_type}]
except Exception as ex:
self._recent_jobs_lock.release()
raise Exception(ex)
self._recent_jobs_lock.release()
# If we did not have a recent event, then send it on to the monitor
+        sleep(self._settletime)
+        self._recent_jobs_lock.acquire()
+        try:
+            if event.src_path in self._recent_jobs \
+                    and event.time_stamp < self._recent_jobs[event.src_path][0]:
+                self._recent_jobs_lock.release()
+                return
+        except Exception as ex:
+            self._recent_jobs_lock.release()
+            raise Exception(ex)
+        event.event_type = self._recent_jobs[event.src_path][1]
+        self._recent_jobs_lock.release()
self.monitor.match(event)
# recent_timestamp = self._recent_jobs[event.src_path]
# difference = event.time_stamp - recent_timestamp
#
# # Discard the event if we already have a recent event at this
# # same path. Update the most recent time, so we can hopefully
# # wait till events have stopped happening
# if difference <= self._settletime:
# self._recent_jobs[event.src_path] = \
# max(recent_timestamp, event.time_stamp)
# self._recent_jobs_lock.release()
# return
# else:
# self._recent_jobs[event.src_path] = event.time_stamp
# else:
# self._recent_jobs[event.src_path] = event.time_stamp
# except Exception as ex:
# self._recent_jobs_lock.release()
# raise Exception(ex)
# self._recent_jobs_lock.release()
#
# # If we did not have a recent event, then send it on to the monitor
# self.monitor.match(event)
def handle_event(self, event):
"""Handler function, called by all specific event functions. Will
attach a timestamp to the event immediately, and attempt to start a
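
The reworked debounce above replaces the single per-path timestamp with a [timestamp, {event types}] pair: the first event for a path sleeps through the settle window while later events extend the record and fold in their types, so a single coalesced event reaches the monitor. A standalone sketch of that behaviour, under the simplifying assumption of plain functions in place of the handler class:

from threading import Lock, Thread
from time import sleep, time

recent = {}        # src_path -> [latest timestamp, {raw event types}]
lock = Lock()
SETTLETIME = 1

def debounce(src_path, event_type, timestamp, send):
    with lock:
        if src_path in recent:
            if timestamp > recent[src_path][0]:
                recent[src_path][0] = timestamp
                recent[src_path][1].add(event_type)
            else:
                return                    # duplicate of an already-seen event
        else:
            recent[src_path] = [timestamp, {event_type}]
    sleep(SETTLETIME)                     # wait for follow-up events to land
    with lock:
        if timestamp < recent[src_path][0]:
            return                        # a newer event now owns this path
        send(src_path, recent[src_path][1])

if __name__ == "__main__":
    Thread(target=debounce, args=("A.txt", "created", time(), print)).start()
    sleep(0.1)
    Thread(target=debounce, args=("A.txt", "closed", time(), print)).start()
    # prints a single coalesced line, e.g.: A.txt {'created', 'closed'}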

View File

@ -2,3 +2,4 @@ pytest
papermill
nbformat
pyyaml
watchdog

File diff suppressed because it is too large

View File

@ -13,7 +13,7 @@ cd $script_dir
search_dir=.
for entry in "$search_dir"/*
do
-  if [[ $entry == ./test* ]] && [[ $entry != ./$script_name ]] && [[ $entry != ./shared.py ]];
+  if [[ $entry == ./test* ]] && [[ -f $entry ]] && [[ $entry != ./$script_name ]] && [[ $entry != ./shared.py ]];
then
pytest $entry "-W ignore::DeprecationWarning"
fi

View File

@ -406,7 +406,7 @@ class MeowTests(unittest.TestCase):
error = read_file(error_file)
self.assertEqual(error,
"Recieved incorrectly setup job.\n\n[Errno 2] No such file or "
f"directory: 'test_job_queue_dir/{job_dict[JOB_ID]}/job.yml'")
f"directory: 'test_job_queue_dir{os.path.sep}{job_dict[JOB_ID]}{os.path.sep}job.yml'")
# Test LocalPythonConductor does not execute jobs with bad functions
def testLocalPythonConductorBadFunc(self)->None:

View File

@ -1,8 +1,11 @@
import io
import importlib
import os
import unittest
from random import shuffle
from shutil import copy
from time import sleep
from core.base_conductor import BaseConductor
@ -12,17 +15,107 @@ from conductors import LocalPythonConductor
from core.correctness.vars import get_result_file, \
JOB_TYPE_PAPERMILL, JOB_ERROR, META_FILE, JOB_TYPE_PYTHON, JOB_CREATE_TIME
from core.runner import MeowRunner
-from functionality.file_io import make_dir, read_file, read_notebook, read_yaml
+from functionality.file_io import make_dir, read_file, read_notebook, \
+    read_yaml, write_file, lines_to_string
from functionality.meow import create_parameter_sweep
from functionality.requirements import create_python_requirements
from patterns.file_event_pattern import WatchdogMonitor, FileEventPattern
from recipes.jupyter_notebook_recipe import PapermillHandler, \
JupyterNotebookRecipe
from recipes.python_recipe import PythonHandler, PythonRecipe
-from shared import setup, teardown, \
-    TEST_JOB_QUEUE, TEST_JOB_OUTPUT, TEST_MONITOR_BASE, \
-    APPENDING_NOTEBOOK, COMPLETE_PYTHON_SCRIPT, TEST_DIR
+from shared import setup, teardown, backup_before_teardown, \
+    TEST_JOB_QUEUE, TEST_JOB_OUTPUT, TEST_MONITOR_BASE, MAKER_RECIPE, \
+    APPENDING_NOTEBOOK, COMPLETE_PYTHON_SCRIPT, TEST_DIR, FILTER_RECIPE, \
+    POROSITY_CHECK_NOTEBOOK, SEGMENT_FOAM_NOTEBOOK, GENERATOR_NOTEBOOK, \
+    FOAM_PORE_ANALYSIS_NOTEBOOK, IDMC_UTILS_MODULE, TEST_DATA, GENERATE_SCRIPT
pattern_check = FileEventPattern(
"pattern_check",
os.path.join("foam_ct_data", "*"),
"recipe_check",
"input_filename",
parameters={
"output_filedir_accepted":
os.path.join("{BASE}", "foam_ct_data_accepted"),
"output_filedir_discarded":
os.path.join("{BASE}", "foam_ct_data_discarded"),
"porosity_lower_threshold": 0.8,
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py")
})
pattern_segment = FileEventPattern(
"pattern_segment",
os.path.join("foam_ct_data_accepted", "*"),
"recipe_segment",
"input_filename",
parameters={
"output_filedir": os.path.join("{BASE}", "foam_ct_data_segmented"),
"input_filedir": os.path.join("{BASE}", "foam_ct_data"),
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py")
})
pattern_analysis = FileEventPattern(
"pattern_analysis",
os.path.join("foam_ct_data_segmented", "*"),
"recipe_analysis",
"input_filename",
parameters={
"output_filedir": os.path.join("{BASE}", "foam_ct_data_pore_analysis"),
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py")
})
pattern_regenerate = FileEventPattern(
"pattern_regenerate",
os.path.join("foam_ct_data_discarded", "*"),
"recipe_generator",
"discarded",
parameters={
"dest_dir": os.path.join("{BASE}", "foam_ct_data"),
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py"),
"gen_path": os.path.join("{BASE}", "generator.py"),
"test_data": os.path.join(TEST_DATA, "foam_ct_data"),
"vx": 64,
"vy": 64,
"vz": 64,
"res": 3/64,
"chance_good": 1,
"chance_small": 0,
"chance_big": 0
})
recipe_check_key, recipe_check_req = create_python_requirements(
modules=["numpy", "importlib", "matplotlib"])
recipe_check = JupyterNotebookRecipe(
'recipe_check',
POROSITY_CHECK_NOTEBOOK,
requirements={recipe_check_key: recipe_check_req}
)
recipe_segment_key, recipe_segment_req = create_python_requirements(
modules=["numpy", "importlib", "matplotlib", "scipy", "skimage"])
recipe_segment = JupyterNotebookRecipe(
'recipe_segment',
SEGMENT_FOAM_NOTEBOOK,
requirements={recipe_segment_key: recipe_segment_req}
)
recipe_analysis_key, recipe_analysis_req = create_python_requirements(
modules=["numpy", "importlib", "matplotlib", "scipy", "skimage"])
recipe_analysis = JupyterNotebookRecipe(
'recipe_analysis',
FOAM_PORE_ANALYSIS_NOTEBOOK,
requirements={recipe_analysis_key: recipe_analysis_req}
)
recipe_generator_key, recipe_generator_req = create_python_requirements(
modules=["numpy", "matplotlib", "random"])
recipe_generator = JupyterNotebookRecipe(
'recipe_generator',
GENERATOR_NOTEBOOK,
requirements={recipe_generator_key: recipe_generator_req}
)
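
# Taken together, these fixtures define a looping workflow: new files in
# foam_ct_data are porosity-checked; accepted files are segmented and then
# pore-analysed, while discarded files trigger the generator recipe to write
# a replacement sample back into foam_ct_data.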
class MeowTests(unittest.TestCase):
def setUp(self)->None:
super().setUp()
@ -171,7 +264,7 @@ class MeowTests(unittest.TestCase):
"infile",
parameters={
"extra":"A line from a test Pattern",
"outfile":os.path.join("{VGRID}", "output", "{FILENAME}")
"outfile":os.path.join("{BASE}", "output", "{FILENAME}")
})
recipe = JupyterNotebookRecipe(
"recipe_one", APPENDING_NOTEBOOK)
@ -257,13 +350,13 @@ class MeowTests(unittest.TestCase):
"infile",
parameters={
"extra":"A line from Pattern 1",
"outfile":os.path.join("{VGRID}", "middle", "{FILENAME}")
"outfile":os.path.join("{BASE}", "middle", "{FILENAME}")
})
pattern_two = FileEventPattern(
"pattern_two", os.path.join("middle", "A.txt"), "recipe_one", "infile",
parameters={
"extra":"A line from Pattern 2",
"outfile":os.path.join("{VGRID}", "output", "{FILENAME}")
"outfile":os.path.join("{BASE}", "output", "{FILENAME}")
})
recipe = JupyterNotebookRecipe(
"recipe_one", APPENDING_NOTEBOOK)
@ -367,7 +460,7 @@ class MeowTests(unittest.TestCase):
"pattern_one", os.path.join("start", "A.txt"), "recipe_one", "infile",
parameters={
"num":10000,
"outfile":os.path.join("{VGRID}", "output", "{FILENAME}")
"outfile":os.path.join("{BASE}", "output", "{FILENAME}")
})
recipe = PythonRecipe(
"recipe_one", COMPLETE_PYTHON_SCRIPT
@ -459,7 +552,7 @@ class MeowTests(unittest.TestCase):
"infile",
parameters={
"num":250,
"outfile":os.path.join("{VGRID}", "middle", "{FILENAME}")
"outfile":os.path.join("{BASE}", "middle", "{FILENAME}")
})
pattern_two = FileEventPattern(
"pattern_two",
@ -468,7 +561,7 @@ class MeowTests(unittest.TestCase):
"infile",
parameters={
"num":40,
"outfile":os.path.join("{VGRID}", "output", "{FILENAME}")
"outfile":os.path.join("{BASE}", "output", "{FILENAME}")
})
recipe = PythonRecipe(
"recipe_one", COMPLETE_PYTHON_SCRIPT
@ -595,7 +688,7 @@ class MeowTests(unittest.TestCase):
"infile",
sweep=create_parameter_sweep("num", 1000, 10000, 200),
parameters={
"outfile":os.path.join("{VGRID}", "output", "{FILENAME}")
"outfile":os.path.join("{BASE}", "output", "{FILENAME}")
})
recipe = PythonRecipe(
"recipe_one", COMPLETE_PYTHON_SCRIPT
@ -678,7 +771,608 @@ class MeowTests(unittest.TestCase):
output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
self.assertTrue(os.path.exists(output_path))
# TODO: add tests with numpy or other external dependencies
def testSelfModifyingAnalysis(self)->None:
maker_pattern = FileEventPattern(
"maker_pattern",
os.path.join("confs", "*.yml"),
"maker_recipe",
"input_yaml",
parameters={
"meow_dir": "self-modifying",
"filter_recipe": "recipe_filter",
"recipe_input_image": "input_image",
"recipe_output_image": "output_image",
"recipe_args": "args",
"recipe_method": "method"
})
patterns = {
"maker_pattern": maker_pattern,
}
filter_recipe = JupyterNotebookRecipe(
"filter_recipe", FILTER_RECIPE
)
maker_recipe = JupyterNotebookRecipe(
"maker_recipe", MAKER_RECIPE
)
recipes = {
filter_recipe.name: filter_recipe,
maker_recipe.name: maker_recipe
}
runner_debug_stream = io.StringIO("")
runner = MeowRunner(
WatchdogMonitor(
TEST_MONITOR_BASE,
patterns,
recipes,
settletime=1
),
PythonHandler(
job_queue_dir=TEST_JOB_QUEUE
),
LocalPythonConductor(),
job_queue_dir=TEST_JOB_QUEUE,
job_output_dir=TEST_JOB_OUTPUT,
print=runner_debug_stream,
logging=3
)
# TODO finish me
# runner.start()
# Test some actual scientific analysis, but in a simple progression
def testScientificAnalysisAllGood(self)->None:
patterns = {
'pattern_check': pattern_check,
'pattern_segment': pattern_segment,
'pattern_analysis': pattern_analysis,
'pattern_regenerate': pattern_regenerate
}
recipes = {
'recipe_check': recipe_check,
'recipe_segment': recipe_segment,
'recipe_analysis': recipe_analysis,
'recipe_generator': recipe_generator
}
runner_debug_stream = io.StringIO("")
runner = MeowRunner(
WatchdogMonitor(
TEST_MONITOR_BASE,
patterns,
recipes,
settletime=1
),
PapermillHandler(
job_queue_dir=TEST_JOB_QUEUE
),
LocalPythonConductor(),
job_queue_dir=TEST_JOB_QUEUE,
job_output_dir=TEST_JOB_OUTPUT,
print=runner_debug_stream,
logging=3
)
good = 3
big = 0
small = 0
vx = 64
vy = 64
vz = 64
res = 3/vz
backup_data_dir = os.path.join(TEST_DATA, "foam_ct_data")
foam_data_dir = os.path.join(TEST_MONITOR_BASE, "foam_ct_data")
make_dir(foam_data_dir)
write_file(lines_to_string(IDMC_UTILS_MODULE),
os.path.join(TEST_MONITOR_BASE, "idmc_utils_module.py"))
gen_path = os.path.join(TEST_MONITOR_BASE, "generator.py")
write_file(lines_to_string(GENERATE_SCRIPT), gen_path)
u_spec = importlib.util.spec_from_file_location("gen", gen_path)
gen = importlib.util.module_from_spec(u_spec)
u_spec.loader.exec_module(gen)
all_data = [1000] * good + [100] * big + [10000] * small
shuffle(all_data)
for i, val in enumerate(all_data):
filename = f"foam_dataset_{i}_{val}_{vx}_{vy}_{vz}.npy"
backup_file = os.path.join(backup_data_dir, filename)
if not os.path.exists(backup_file):
gen.create_foam_data_file(backup_file, val, vx, vy, vz, res)
target_file = os.path.join(foam_data_dir, filename)
copy(backup_file, target_file)
self.assertEqual(len(os.listdir(foam_data_dir)), good + big + small)
runner.start()
idle_loops = 0
total_loops = 0
messages = None
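        # Poll the runner's debug stream once per second, treating 15
        # consecutive quiet seconds as completion and giving up after 150
        # seconds in total.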
while idle_loops < 15 and total_loops < 150:
sleep(1)
runner_debug_stream.seek(0)
new_messages = runner_debug_stream.readlines()
if messages == new_messages:
idle_loops += 1
else:
idle_loops = 0
messages = new_messages
total_loops += 1
for message in messages:
print(message.replace('\n', ''))
runner.stop()
print(f"total_loops:{total_loops}, idle_loops:{idle_loops}")
if len(os.listdir(TEST_JOB_OUTPUT)) != good * 3:
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-all_good-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-all_good-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-all_good-{TEST_MONITOR_BASE}")
self.assertEqual(len(os.listdir(TEST_JOB_OUTPUT)), good * 3)
for job_dir in os.listdir(TEST_JOB_OUTPUT):
metafile = os.path.join(TEST_JOB_OUTPUT, job_dir, META_FILE)
status = read_yaml(metafile)
if JOB_ERROR in status:
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-all_good-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-all_good-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-all_good-{TEST_MONITOR_BASE}")
self.assertNotIn(JOB_ERROR, status)
result_path = os.path.join(
TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
self.assertTrue(os.path.exists(result_path))
# Test some actual scientific analysis, in a predictable loop
def testScientificAnalysisPredictableLoop(self)->None:
patterns = {
'pattern_check': pattern_check,
'pattern_segment': pattern_segment,
'pattern_analysis': pattern_analysis,
'pattern_regenerate': pattern_regenerate
}
recipes = {
'recipe_check': recipe_check,
'recipe_segment': recipe_segment,
'recipe_analysis': recipe_analysis,
'recipe_generator': recipe_generator
}
runner_debug_stream = io.StringIO("")
runner = MeowRunner(
WatchdogMonitor(
TEST_MONITOR_BASE,
patterns,
recipes,
settletime=1
),
PapermillHandler(
job_queue_dir=TEST_JOB_QUEUE
),
LocalPythonConductor(),
job_queue_dir=TEST_JOB_QUEUE,
job_output_dir=TEST_JOB_OUTPUT,
print=runner_debug_stream,
logging=3
)
good = 10
big = 5
small = 0
vx = 64
vy = 64
vz = 64
res = 3/vz
backup_data_dir = os.path.join(TEST_DATA, "foam_ct_data")
make_dir(backup_data_dir)
foam_data_dir = os.path.join(TEST_MONITOR_BASE, "foam_ct_data")
make_dir(foam_data_dir)
write_file(lines_to_string(IDMC_UTILS_MODULE),
os.path.join(TEST_MONITOR_BASE, "idmc_utils_module.py"))
gen_path = os.path.join(TEST_MONITOR_BASE, "generator.py")
write_file(lines_to_string(GENERATE_SCRIPT), gen_path)
all_data = [1000] * good + [100] * big + [10000] * small
shuffle(all_data)
u_spec = importlib.util.spec_from_file_location("gen", gen_path)
gen = importlib.util.module_from_spec(u_spec)
u_spec.loader.exec_module(gen)
for i, val in enumerate(all_data):
filename = f"foam_dataset_{i}_{val}_{vx}_{vy}_{vz}.npy"
backup_file = os.path.join(backup_data_dir, filename)
if not os.path.exists(backup_file):
gen.create_foam_data_file(backup_file, val, vx, vy, vz, res)
target_file = os.path.join(foam_data_dir, filename)
copy(backup_file, target_file)
self.assertEqual(len(os.listdir(foam_data_dir)), good + big + small)
runner.start()
idle_loops = 0
total_loops = 0
messages = None
while idle_loops < 45 and total_loops < 600:
sleep(1)
runner_debug_stream.seek(0)
new_messages = runner_debug_stream.readlines()
if messages == new_messages:
idle_loops += 1
else:
idle_loops = 0
messages = new_messages
total_loops += 1
for message in messages:
print(message.replace('\n', ''))
runner.stop()
print(f"total_loops:{total_loops}, idle_loops:{idle_loops}")
jobs = len(os.listdir(TEST_JOB_OUTPUT))
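        # Expected job count, assuming the pattern chain defined above: each
        # good sample yields 3 jobs (check, segment, analysis) and each bad
        # sample 5 (check, regenerate, plus the 3 for its good replacement).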
if jobs != (good*3 + big*5 + small*5):
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-predictable-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-predictable-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-predictable-{TEST_MONITOR_BASE}")
self.assertEqual(jobs, good*3 + big*5 + small*5)
for job_dir in os.listdir(TEST_JOB_OUTPUT):
metafile = os.path.join(TEST_JOB_OUTPUT, job_dir, META_FILE)
status = read_yaml(metafile)
if JOB_ERROR in status:
print(status[JOB_ERROR])
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-predictable-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-predictable-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-predictable-{TEST_MONITOR_BASE}")
self.assertNotIn(JOB_ERROR, status)
result_path = os.path.join(
TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
self.assertTrue(os.path.exists(result_path))
results = len(os.listdir(
os.path.join(TEST_MONITOR_BASE, "foam_ct_data_pore_analysis")))
if results != good+big+small:
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-predictable-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-predictable-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-predictable-{TEST_MONITOR_BASE}")
self.assertEqual(results, good+big+small)
# Test some actual scientific analysis, in an unpredictable loop
def testScientificAnalysisRandomLoop(self)->None:
pattern_regenerate_random = FileEventPattern(
"pattern_regenerate_random",
os.path.join("foam_ct_data_discarded", "*"),
"recipe_generator",
"discarded",
parameters={
"dest_dir": os.path.join("{BASE}", "foam_ct_data"),
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py"),
"gen_path": os.path.join("{BASE}", "generator.py"),
"test_data": os.path.join(TEST_DATA, "foam_ct_data"),
"vx": 64,
"vy": 64,
"vz": 64,
"res": 3/64,
"chance_good": 1,
"chance_small": 0,
"chance_big": 1
})
patterns = {
'pattern_check': pattern_check,
'pattern_segment': pattern_segment,
'pattern_analysis': pattern_analysis,
'pattern_regenerate_random': pattern_regenerate_random
}
recipes = {
'recipe_check': recipe_check,
'recipe_segment': recipe_segment,
'recipe_analysis': recipe_analysis,
'recipe_generator': recipe_generator
}
runner_debug_stream = io.StringIO("")
runner = MeowRunner(
WatchdogMonitor(
TEST_MONITOR_BASE,
patterns,
recipes,
settletime=1
),
PapermillHandler(
job_queue_dir=TEST_JOB_QUEUE
),
LocalPythonConductor(),
job_queue_dir=TEST_JOB_QUEUE,
job_output_dir=TEST_JOB_OUTPUT,
print=runner_debug_stream,
logging=3
)
good = 10
big = 5
small = 0
vx = 64
vy = 64
vz = 64
res = 3/vz
backup_data_dir = os.path.join(TEST_DATA, "foam_ct_data")
make_dir(backup_data_dir)
foam_data_dir = os.path.join(TEST_MONITOR_BASE, "foam_ct_data")
make_dir(foam_data_dir)
write_file(lines_to_string(IDMC_UTILS_MODULE),
os.path.join(TEST_MONITOR_BASE, "idmc_utils_module.py"))
gen_path = os.path.join(TEST_MONITOR_BASE, "generator.py")
write_file(lines_to_string(GENERATE_SCRIPT), gen_path)
all_data = [1000] * good + [100] * big + [10000] * small
shuffle(all_data)
u_spec = importlib.util.spec_from_file_location("gen", gen_path)
gen = importlib.util.module_from_spec(u_spec)
u_spec.loader.exec_module(gen)
for i, val in enumerate(all_data):
filename = f"foam_dataset_{i}_{val}_{vx}_{vy}_{vz}.npy"
backup_file = os.path.join(backup_data_dir, filename)
if not os.path.exists(backup_file):
gen.create_foam_data_file(backup_file, val, vx, vy, vz, res)
target_file = os.path.join(foam_data_dir, filename)
copy(backup_file, target_file)
self.assertEqual(len(os.listdir(foam_data_dir)), good + big + small)
runner.start()
idle_loops = 0
total_loops = 0
messages = None
while idle_loops < 60 and total_loops < 600:
sleep(1)
runner_debug_stream.seek(0)
new_messages = runner_debug_stream.readlines()
if messages == new_messages:
idle_loops += 1
else:
idle_loops = 0
messages = new_messages
total_loops += 1
for message in messages:
print(message.replace('\n', ''))
runner.stop()
print(f"total_loops:{total_loops}, idle_loops:{idle_loops}")
for job_dir in os.listdir(TEST_JOB_OUTPUT):
metafile = os.path.join(TEST_JOB_OUTPUT, job_dir, META_FILE)
status = read_yaml(metafile)
if JOB_ERROR in status:
print(status[JOB_ERROR])
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-random-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-random-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-random-{TEST_MONITOR_BASE}")
self.assertNotIn(JOB_ERROR, status)
result_path = os.path.join(
TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
self.assertTrue(os.path.exists(result_path))
outputs = len(os.listdir(TEST_JOB_OUTPUT))
if outputs < good*3 + big*5 + small*5:
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-random-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-random-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-random-{TEST_MONITOR_BASE}")
self.assertTrue(outputs >= good*3 + big*5 + small*5)
results = len(os.listdir(
os.path.join(TEST_MONITOR_BASE, "foam_ct_data_pore_analysis")))
self.assertEqual(results, good+big+small)
# Test some actual scientific analysis, in an unpredictable loop
def testScientificAnalysisMassiveRandomLoop(self)->None:
pattern_regenerate_random = FileEventPattern(
"pattern_regenerate_random",
os.path.join("foam_ct_data_discarded", "*"),
"recipe_generator",
"discarded",
parameters={
"dest_dir": os.path.join("{BASE}", "foam_ct_data"),
"utils_path": os.path.join("{BASE}", "idmc_utils_module.py"),
"gen_path": os.path.join("{BASE}", "generator.py"),
"test_data": os.path.join(TEST_DATA, "foam_ct_data"),
"vx": 32,
"vy": 32,
"vz": 32,
"res": 3/32,
"chance_good": 1,
"chance_small": 0,
"chance_big": 3
})
patterns = {
'pattern_check': pattern_check,
'pattern_segment': pattern_segment,
'pattern_analysis': pattern_analysis,
'pattern_regenerate_random': pattern_regenerate_random
}
recipes = {
'recipe_check': recipe_check,
'recipe_segment': recipe_segment,
'recipe_analysis': recipe_analysis,
'recipe_generator': recipe_generator
}
runner_debug_stream = io.StringIO("")
runner = MeowRunner(
WatchdogMonitor(
TEST_MONITOR_BASE,
patterns,
recipes,
settletime=1
),
PapermillHandler(
job_queue_dir=TEST_JOB_QUEUE
),
LocalPythonConductor(),
job_queue_dir=TEST_JOB_QUEUE,
job_output_dir=TEST_JOB_OUTPUT,
print=runner_debug_stream,
logging=3
)
good = 5
big = 15
small = 0
vx = 32
vy = 32
vz = 32
res = 3/vz
backup_data_dir = os.path.join(TEST_DATA, "foam_ct_data")
make_dir(backup_data_dir)
foam_data_dir = os.path.join(TEST_MONITOR_BASE, "foam_ct_data")
make_dir(foam_data_dir)
write_file(lines_to_string(IDMC_UTILS_MODULE),
os.path.join(TEST_MONITOR_BASE, "idmc_utils_module.py"))
gen_path = os.path.join(TEST_MONITOR_BASE, "generator.py")
write_file(lines_to_string(GENERATE_SCRIPT), gen_path)
all_data = [1000] * good + [100] * big + [10000] * small
shuffle(all_data)
u_spec = importlib.util.spec_from_file_location("gen", gen_path)
gen = importlib.util.module_from_spec(u_spec)
u_spec.loader.exec_module(gen)
for i, val in enumerate(all_data):
filename = f"foam_dataset_{i}_{val}_{vx}_{vy}_{vz}.npy"
backup_file = os.path.join(backup_data_dir, filename)
if not os.path.exists(backup_file):
gen.create_foam_data_file(backup_file, val, vx, vy, vz, res)
target_file = os.path.join(foam_data_dir, filename)
copy(backup_file, target_file)
self.assertEqual(len(os.listdir(foam_data_dir)), good + big + small)
runner.start()
idle_loops = 0
total_loops = 0
messages = None
while idle_loops < 60 and total_loops < 1200:
sleep(1)
runner_debug_stream.seek(0)
new_messages = runner_debug_stream.readlines()
if messages == new_messages:
idle_loops += 1
else:
idle_loops = 0
messages = new_messages
total_loops += 1
for message in messages:
print(message.replace('\n', ''))
runner.stop()
print(f"total_loops:{total_loops}, idle_loops:{idle_loops}")
for job_dir in os.listdir(TEST_JOB_OUTPUT):
metafile = os.path.join(TEST_JOB_OUTPUT, job_dir, META_FILE)
status = read_yaml(metafile)
if JOB_ERROR in status:
print(status[JOB_ERROR])
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-massive-random-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-massive-random-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-massive-random-{TEST_MONITOR_BASE}")
self.assertNotIn(JOB_ERROR, status)
result_path = os.path.join(
TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
self.assertTrue(os.path.exists(result_path))
outputs = len(os.listdir(TEST_JOB_OUTPUT))
if outputs < good*3 + big*5 + small*5:
backup_before_teardown(TEST_JOB_OUTPUT,
f"Backup-massive-random-{TEST_JOB_OUTPUT}")
backup_before_teardown(TEST_JOB_QUEUE,
f"Backup-massive-random-{TEST_JOB_QUEUE}")
backup_before_teardown(TEST_MONITOR_BASE,
f"Backup-massive-random-{TEST_MONITOR_BASE}")
self.assertTrue(outputs >= good*3 + big*5 + small*5)
results = len(os.listdir(
os.path.join(TEST_MONITOR_BASE, "foam_ct_data_pore_analysis")))
self.assertEqual(results, good+big+small)
# TODO test getting job cannot handle
# TODO test getting event cannot handle
# TODO test with several matched monitors