added standardised job creation
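The diff below replaces the tests' manual job assembly (create_job, now renamed create_job_metadata_dict, plus hand-written metadata and parameter files) with the handlers' standardised setup_job path. A minimal sketch of that flow, pieced together from the updated tests below; the bare names file_path, rule, file_hash and params_dict stand in for the per-test fixtures:

```python
# Sketch only: wiring as used in the updated conductor tests.
from multiprocessing import Pipe

from meow_base.conductors import LocalPythonConductor
from meow_base.recipes.python_recipe import PythonHandler
from meow_base.functionality.meow import create_watchdog_event

# The handler announces queued jobs over a pipe instead of the test
# building the job directory by hand.
handler_reader, handler_writer = Pipe()
handler = PythonHandler(job_queue_dir=TEST_JOB_QUEUE)
handler.to_runner_job = handler_writer

conductor = LocalPythonConductor(
    job_queue_dir=TEST_JOB_QUEUE,
    job_output_dir=TEST_JOB_OUTPUT
)

# The handler creates and populates the job directory itself.
event = create_watchdog_event(file_path, rule, TEST_MONITOR_BASE, file_hash)
handler.setup_job(event, params_dict)

# The queued job directory arrives on the pipe and is handed to the conductor.
if handler_reader.poll(3):
    job_queue_dir = handler_reader.recv()
    conductor.execute(job_queue_dir)
```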
@@ -202,11 +202,14 @@ class BaseHandleTests(unittest.TestCase):
        TestHandler()

        class FullTestHandler(BaseHandler):
            def handle(self, event):
                pass
            def valid_handle_criteria(self, event:Dict[str,Any]
                    )->Tuple[bool,str]:
                pass
            def get_created_job_type(self)->str:
                pass
            def create_job_recipe_file(self, job_dir:str, event:Dict[str,Any],
                    params_dict:Dict[str,Any])->str:
                pass

        FullTestHandler()

@@ -233,9 +236,6 @@ class BaseConductorTests(unittest.TestCase):
        TestConductor()

        class FullTestConductor(BaseConductor):
            def execute(self, job_dir:str)->None:
                pass

            def valid_execute_criteria(self, job:Dict[str,Any]
                    )->Tuple[bool,str]:
                pass
@@ -4,31 +4,31 @@ import stat
import unittest

from datetime import datetime
from multiprocessing import Pipe
from typing import Dict

from meow_base.core.vars import JOB_TYPE_PYTHON, SHA256, \
    JOB_PARAMETERS, PYTHON_FUNC, JOB_ID, BACKUP_JOB_ERROR_FILE, \
    JOB_EVENT, META_FILE, PARAMS_FILE, JOB_STATUS, JOB_ERROR, JOB_TYPE, \
    JOB_EVENT, META_FILE, JOB_STATUS, JOB_ERROR, JOB_TYPE, \
    JOB_PATTERN, STATUS_DONE, JOB_TYPE_PAPERMILL, JOB_RECIPE, JOB_RULE, \
    JOB_CREATE_TIME, JOB_REQUIREMENTS, EVENT_PATH, EVENT_RULE, EVENT_TYPE, \
    EVENT_TYPE_WATCHDOG, JOB_TYPE_BASH, \
    get_base_file, get_result_file, get_job_file
    EVENT_TYPE_WATCHDOG, JOB_TYPE_BASH, JOB_FILE
from meow_base.conductors import LocalPythonConductor, LocalBashConductor
from meow_base.functionality.file_io import read_file, read_yaml, write_file, \
    write_notebook, write_yaml, lines_to_string, make_dir
    write_yaml, lines_to_string, make_dir, threadsafe_read_status
from meow_base.functionality.hashing import get_hash
from meow_base.functionality.meow import create_watchdog_event, create_job, \
from meow_base.functionality.meow import create_watchdog_event, create_job_metadata_dict, \
    create_rule
from meow_base.functionality.parameterisation import parameterize_bash_script
from meow_base.patterns.file_event_pattern import FileEventPattern
from meow_base.recipes.jupyter_notebook_recipe import JupyterNotebookRecipe, \
    papermill_job_func
from meow_base.recipes.python_recipe import PythonRecipe, python_job_func
from meow_base.recipes.bash_recipe import BashRecipe, assemble_bash_job_script
    PapermillHandler
from meow_base.recipes.python_recipe import PythonRecipe, PythonHandler
from meow_base.recipes.bash_recipe import BashRecipe, BashHandler
from shared import TEST_MONITOR_BASE, APPENDING_NOTEBOOK, TEST_JOB_OUTPUT, \
    TEST_JOB_QUEUE, COMPLETE_PYTHON_SCRIPT, BAREBONES_PYTHON_SCRIPT, \
    BAREBONES_NOTEBOOK, COMPLETE_BASH_SCRIPT, BAREBONES_BASH_SCRIPT, \
    setup, teardown
    setup, teardown, count_non_locks

def failing_func():
    raise Exception("bad function")
@@ -58,10 +58,16 @@ class PythonTests(unittest.TestCase):

    # Test LocalPythonConductor executes valid python jobs
    def testLocalPythonConductorValidPythonJob(self)->None:
        from_handler_to_runner_reader, from_handler_to_runner_writer = Pipe()
        bh = PythonHandler(job_queue_dir=TEST_JOB_QUEUE)
        bh.to_runner_job = from_handler_to_runner_writer

        conductor_to_test_conductor, conductor_to_test_test = Pipe(duplex=True)
        lpc = LocalPythonConductor(
            job_queue_dir=TEST_JOB_QUEUE,
            job_output_dir=TEST_JOB_OUTPUT
        )
        lpc.to_runner_job = conductor_to_test_conductor

        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output")
@@ -91,37 +97,34 @@ class PythonTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
            JOB_TYPE_PYTHON,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
                PYTHON_FUNC:python_job_func
            }
        event = create_watchdog_event(
            file_path,
            rule,
            TEST_MONITOR_BASE,
            file_hash
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)
        bh.setup_job(event, params_dict)

        param_file = os.path.join(job_dir, PARAMS_FILE)
        write_yaml(params_dict, param_file)
        lpc.start()

        meta_path = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_path)
        # Get valid job
        if from_handler_to_runner_reader.poll(3):
            job_queue_dir = from_handler_to_runner_reader.recv()

        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_PYTHON))
        write_file(lines_to_string(COMPLETE_PYTHON_SCRIPT), base_file)
        # Send it to conductor
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(job_queue_dir)

        lpc.execute(job_dir)
        # Wait for job to complete
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(1)

        self.assertFalse(os.path.exists(job_dir))

        job_output_dir = os.path.join(TEST_JOB_OUTPUT, job_dict[JOB_ID])
        job_output_dir = job_queue_dir.replace(TEST_JOB_QUEUE, TEST_JOB_OUTPUT)

        self.assertFalse(os.path.exists(job_queue_dir))
        self.assertTrue(os.path.exists(job_output_dir))

        meta_path = os.path.join(job_output_dir, META_FILE)
@@ -129,26 +132,33 @@ class PythonTests(unittest.TestCase):
        status = read_yaml(meta_path)
        self.assertIsInstance(status, Dict)
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], STATUS_DONE)

        print(status)

        self.assertEqual(status[JOB_STATUS], STATUS_DONE)
        self.assertNotIn(JOB_ERROR, status)
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_base_file(JOB_TYPE_PYTHON))))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, PARAMS_FILE)))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_job_file(JOB_TYPE_PYTHON))))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_result_file(JOB_TYPE_PYTHON))))

        print(os.listdir(job_output_dir))
        self.assertEqual(count_non_locks(job_output_dir), 4)
        for f in [META_FILE, "recipe.py", "output.log", "job.sh"]:
            self.assertTrue(os.path.exists(os.path.join(job_output_dir, f)))

        self.assertTrue(os.path.exists(result_path))
        result = read_file(result_path)
        self.assertEqual(result, "25293.75")

    # Test LocalPythonConductor executes valid papermill jobs
    def testLocalPythonConductorValidPapermillJob(self)->None:
        from_handler_to_runner_reader, from_handler_to_runner_writer = Pipe()
        bh = PapermillHandler(job_queue_dir=TEST_JOB_QUEUE)
        bh.to_runner_job = from_handler_to_runner_writer

        conductor_to_test_conductor, conductor_to_test_test = Pipe(duplex=True)
        lpc = LocalPythonConductor(
            job_queue_dir=TEST_JOB_QUEUE,
            job_output_dir=TEST_JOB_OUTPUT
        )
        lpc.to_runner_job = conductor_to_test_conductor

        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output", "test")
@@ -178,38 +188,34 @@ class PythonTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
                PYTHON_FUNC:papermill_job_func
            }
        event = create_watchdog_event(
            file_path,
            rule,
            TEST_MONITOR_BASE,
            file_hash
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)
        bh.setup_job(event, params_dict)

        param_file = os.path.join(job_dir, PARAMS_FILE)
        write_yaml(params_dict, param_file)
        lpc.start()

        meta_path = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_path)
        # Get valid job
        if from_handler_to_runner_reader.poll(3):
            job_queue_dir = from_handler_to_runner_reader.recv()

        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_PAPERMILL))
        write_notebook(APPENDING_NOTEBOOK, base_file)
        # Send it to conductor
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(job_queue_dir)

        lpc.execute(job_dir)
        # Wait for job to complete
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(1)

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        self.assertFalse(os.path.exists(job_dir))

        job_output_dir = os.path.join(TEST_JOB_OUTPUT, job_dict[JOB_ID])
        job_output_dir = job_queue_dir.replace(TEST_JOB_QUEUE, TEST_JOB_OUTPUT)

        self.assertFalse(os.path.exists(job_queue_dir))
        self.assertTrue(os.path.exists(job_output_dir))

        meta_path = os.path.join(job_output_dir, META_FILE)
@@ -219,139 +225,13 @@ class PythonTests(unittest.TestCase):
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], STATUS_DONE)

        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_base_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, PARAMS_FILE)))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_job_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_result_file(JOB_TYPE_PAPERMILL))))

        self.assertTrue(os.path.exists(result_path))

    # Test LocalPythonConductor does not execute jobs with bad arguments
    def testLocalPythonConductorBadArgs(self)->None:
        lpc = LocalPythonConductor(
            job_queue_dir=TEST_JOB_QUEUE,
            job_output_dir=TEST_JOB_OUTPUT
        )

        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output", "test")

        with open(file_path, "w") as f:
            f.write("Data")

        file_hash = get_hash(file_path, SHA256)

        pattern = FileEventPattern(
            "pattern",
            file_path,
            "recipe_one",
            "infile",
            parameters={
                "extra":"A line from a test Pattern",
                "outfile":result_path
            })
        recipe = JupyterNotebookRecipe(
            "recipe_one", APPENDING_NOTEBOOK)

        rule = create_rule(pattern, recipe)

        params_dict = {
            "extra":"extra",
            "infile":file_path,
            "outfile":result_path
        }

        bad_job_dict = create_job(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict
            }
        )

        bad_job_dir = os.path.join(TEST_JOB_QUEUE, bad_job_dict[JOB_ID])
        make_dir(bad_job_dir)

        bad_param_file = os.path.join(bad_job_dir, PARAMS_FILE)
        write_yaml(params_dict, bad_param_file)

        bad_meta_path = os.path.join(bad_job_dir, META_FILE)
        write_yaml(bad_job_dict, bad_meta_path)

        bad_base_file = os.path.join(bad_job_dir,
            get_base_file(JOB_TYPE_PAPERMILL))
        write_notebook(APPENDING_NOTEBOOK, bad_base_file)

        lpc.execute(bad_job_dir)

        bad_output_dir = os.path.join(TEST_JOB_OUTPUT, bad_job_dict[JOB_ID])
        self.assertFalse(os.path.exists(bad_job_dir))
        self.assertTrue(os.path.exists(bad_output_dir))

        bad_meta_path = os.path.join(bad_output_dir, META_FILE)
        self.assertTrue(os.path.exists(bad_meta_path))

        bad_job = read_yaml(bad_meta_path)
        self.assertIsInstance(bad_job, dict)
        self.assertIn(JOB_ERROR, bad_job)

        # Ensure execution can continue after one failed job
        good_job_dict = create_job(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
                PYTHON_FUNC:papermill_job_func
            }
        )

        good_job_dir = os.path.join(TEST_JOB_QUEUE, good_job_dict[JOB_ID])
        make_dir(good_job_dir)

        good_param_file = os.path.join(good_job_dir, PARAMS_FILE)
        write_yaml(params_dict, good_param_file)

        good_meta_path = os.path.join(good_job_dir, META_FILE)
        write_yaml(good_job_dict, good_meta_path)

        good_base_file = os.path.join(good_job_dir,
            get_base_file(JOB_TYPE_PAPERMILL))
        write_notebook(APPENDING_NOTEBOOK, good_base_file)

        lpc.execute(good_job_dir)

        good_job_dir = os.path.join(TEST_JOB_QUEUE, good_job_dict[JOB_ID])
        self.assertFalse(os.path.exists(good_job_dir))

        good_job_output_dir = os.path.join(TEST_JOB_OUTPUT, good_job_dict[JOB_ID])
        self.assertTrue(os.path.exists(good_job_output_dir))
        self.assertTrue(os.path.exists(
            os.path.join(good_job_output_dir, META_FILE)))

        self.assertTrue(os.path.exists(
            os.path.join(good_job_output_dir, get_base_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(
            os.path.join(good_job_output_dir, PARAMS_FILE)))
        self.assertTrue(os.path.exists(
            os.path.join(good_job_output_dir, get_job_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(
            os.path.join(good_job_output_dir, get_result_file(JOB_TYPE_PAPERMILL))))
        self.assertEqual(count_non_locks(job_output_dir), 4)
        for f in [META_FILE, JOB_FILE, "result.ipynb", "recipe.ipynb"]:
            self.assertTrue(os.path.exists(os.path.join(job_output_dir, f)))

        self.assertTrue(os.path.exists(result_path))
        result = read_file(result_path)
        self.assertEqual(result, "Data\nextra")

    # Test LocalPythonConductor does not execute jobs with missing metafile
    def testLocalPythonConductorMissingMetafile(self)->None:
@@ -382,7 +262,7 @@ class PythonTests(unittest.TestCase):

        rule = create_rule(pattern, recipe)

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -418,77 +298,6 @@ class PythonTests(unittest.TestCase):
            "Recieved incorrectly setup job.\n\n[Errno 2] No such file or "
            f"directory: 'test_job_queue_dir{os.path.sep}{job_dict[JOB_ID]}{os.path.sep}job.yml'")

    # Test LocalPythonConductor does not execute jobs with bad functions
    def testLocalPythonConductorBadFunc(self)->None:
        lpc = LocalPythonConductor(
            job_queue_dir=TEST_JOB_QUEUE,
            job_output_dir=TEST_JOB_OUTPUT
        )

        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output", "test")

        with open(file_path, "w") as f:
            f.write("Data")

        file_hash = get_hash(file_path, SHA256)

        pattern = FileEventPattern(
            "pattern",
            file_path,
            "recipe_one",
            "infile",
            parameters={
                "extra":"A line from a test Pattern",
                "outfile":result_path
            })
        recipe = JupyterNotebookRecipe(
            "recipe_one", APPENDING_NOTEBOOK)

        rule = create_rule(pattern, recipe)

        params = {
            "extra":"extra",
            "infile":file_path,
            "outfile":result_path
        }

        job_dict = create_job(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params,
                PYTHON_FUNC:failing_func,
            }
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)

        param_file = os.path.join(job_dir, PARAMS_FILE)
        write_yaml(params, param_file)

        meta_path = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_path)

        lpc.execute(job_dir)

        output_dir = os.path.join(TEST_JOB_OUTPUT, job_dict[JOB_ID])
        self.assertFalse(os.path.exists(job_dir))
        self.assertTrue(os.path.exists(output_dir))

        meta_path = os.path.join(output_dir, META_FILE)
        self.assertTrue(os.path.exists(meta_path))

        job = read_yaml(meta_path)
        self.assertIsInstance(job, dict)
        self.assertIn(JOB_ERROR, job)

    # Test LocalPythonConductor does not execute jobs with invalid metafile
    def testLocalPythonConductorInvalidMetafile(self)->None:
        lpc = LocalPythonConductor(
@@ -518,7 +327,7 @@ class PythonTests(unittest.TestCase):

        rule = create_rule(pattern, recipe)

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -586,7 +395,7 @@ class PythonTests(unittest.TestCase):

        rule = create_rule(pattern, recipe)

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -746,10 +555,16 @@ class BashTests(unittest.TestCase):

    # Test LocalBashConductor executes valid bash jobs
    def testLocalBashConductorValidBashJob(self)->None:
        from_handler_to_runner_reader, from_handler_to_runner_writer = Pipe()
        bh = BashHandler(job_queue_dir=TEST_JOB_QUEUE)
        bh.to_runner_job = from_handler_to_runner_writer

        conductor_to_test_conductor, conductor_to_test_test = Pipe(duplex=True)
        lpc = LocalBashConductor(
            job_queue_dir=TEST_JOB_QUEUE,
            job_output_dir=TEST_JOB_OUTPUT
        )
        lpc.to_runner_job = conductor_to_test_conductor

        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output")
@@ -779,44 +594,34 @@ class BashTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
            JOB_TYPE_BASH,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
            }
        event = create_watchdog_event(
            file_path,
            rule,
            TEST_MONITOR_BASE,
            file_hash
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)
        bh.setup_job(event, params_dict)

        meta_path = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_path)
        lpc.start()

        base_script = parameterize_bash_script(
            COMPLETE_BASH_SCRIPT, params_dict
        )
        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_BASH))
        write_file(lines_to_string(base_script), base_file)
        st = os.stat(base_file)
        os.chmod(base_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        # Get valid job
        if from_handler_to_runner_reader.poll(3):
            job_queue_dir = from_handler_to_runner_reader.recv()

        job_script = assemble_bash_job_script()
        job_file = os.path.join(job_dir, get_job_file(JOB_TYPE_BASH))
        write_file(lines_to_string(job_script), job_file)
        st = os.stat(job_file)
        os.chmod(job_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        # Send it to conductor
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(job_queue_dir)

        lpc.execute(job_dir)
        # Wait for job to complete
        if conductor_to_test_test.poll(3):
            _ = conductor_to_test_test.recv()
            conductor_to_test_test.send(1)

        self.assertFalse(os.path.exists(job_dir))

        job_output_dir = os.path.join(TEST_JOB_OUTPUT, job_dict[JOB_ID])
        job_output_dir = job_queue_dir.replace(TEST_JOB_QUEUE, TEST_JOB_OUTPUT)

        self.assertFalse(os.path.exists(job_queue_dir))
        self.assertTrue(os.path.exists(job_output_dir))

        meta_path = os.path.join(job_output_dir, META_FILE)
@@ -824,15 +629,24 @@ class BashTests(unittest.TestCase):
        status = read_yaml(meta_path)
        self.assertIsInstance(status, Dict)
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], STATUS_DONE)

        self.assertEqual(status[JOB_STATUS], STATUS_DONE)
        self.assertNotIn(JOB_ERROR, status)

        self.assertEqual(count_non_locks(job_output_dir), 3)
        for f in [META_FILE, JOB_FILE]:
            self.assertTrue(os.path.exists(os.path.join(job_output_dir, f)))
        job = threadsafe_read_status(os.path.join(job_output_dir, META_FILE))
        self.assertTrue(os.path.exists(os.path.join(job_output_dir, job["tmp script command"])))

        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_base_file(JOB_TYPE_BASH))))
            os.path.join(job_output_dir, )))
        self.assertTrue(os.path.exists(
            os.path.join(job_output_dir, get_job_file(JOB_TYPE_BASH))))
            os.path.join(job_output_dir, JOB_FILE)))

        self.assertTrue(os.path.exists(result_path))
        result = read_file(result_path)
        self.assertEqual(result, "25293\n")

    # Test LocalBashConductor does not execute jobs with missing metafile
    def testLocalBashConductorMissingMetafile(self)->None:
@@ -869,7 +683,7 @@ class BashTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_BASH,
            create_watchdog_event(
                file_path,
@@ -935,7 +749,7 @@ class BashTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -958,7 +772,7 @@ class BashTests(unittest.TestCase):
        base_script = parameterize_bash_script(
            COMPLETE_BASH_SCRIPT, params_dict
        )
        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_BASH))
        base_file = os.path.join(job_dir, JOB_FILE)
        write_file(lines_to_string(base_script), base_file)
        st = os.stat(base_file)
        os.chmod(base_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
@@ -967,7 +781,7 @@ class BashTests(unittest.TestCase):
            "#!/bin/bash",
            "echo Does Nothing"
        ]
        job_file = os.path.join(job_dir, get_job_file(JOB_TYPE_BASH))
        job_file = os.path.join(job_dir, JOB_FILE)
        write_file(lines_to_string(job_script), job_file)
        st = os.stat(job_file)
        os.chmod(job_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
@@ -1019,7 +833,7 @@ class BashTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -1088,7 +902,7 @@ class BashTests(unittest.TestCase):
            "outfile":result_path
        }

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
@@ -17,7 +17,7 @@ from meow_base.core.vars import CHAR_LOWERCASE, CHAR_UPPERCASE, \
    WATCHDOG_BASE, WATCHDOG_HASH, EVENT_RULE, JOB_PARAMETERS, \
    PYTHON_FUNC, JOB_ID, JOB_EVENT, JOB_ERROR, STATUS_DONE, \
    JOB_TYPE, JOB_PATTERN, JOB_RECIPE, JOB_RULE, JOB_STATUS, JOB_CREATE_TIME, \
    JOB_REQUIREMENTS, STATUS_QUEUED, JOB_TYPE_PAPERMILL
    JOB_REQUIREMENTS, STATUS_QUEUED, JOB_TYPE_PAPERMILL, STATUS_CREATING
from meow_base.functionality.debug import setup_debugging
from meow_base.functionality.file_io import lines_to_string, make_dir, \
    read_file, read_file_lines, read_notebook, read_yaml, rmtree, write_file, \
@@ -27,7 +27,7 @@ from meow_base.functionality.hashing import get_hash
from meow_base.functionality.meow import KEYWORD_BASE, KEYWORD_DIR, \
    KEYWORD_EXTENSION, KEYWORD_FILENAME, KEYWORD_JOB, KEYWORD_PATH, \
    KEYWORD_PREFIX, KEYWORD_REL_DIR, KEYWORD_REL_PATH, \
    create_event, create_job, create_rule, create_rules, \
    create_event, create_job_metadata_dict, create_rule, create_rules, \
    create_watchdog_event, replace_keywords, create_parameter_sweep
from meow_base.functionality.naming import _generate_id
from meow_base.functionality.parameterisation import \
@@ -645,7 +645,7 @@ class MeowTests(unittest.TestCase):
        self.assertEqual(event2[EVENT_RULE], rule)
        self.assertEqual(event2["a"], 1)

    # Test that create_job produces valid job dictionary
    # Test that create_job_metadata_dict produces valid job dictionary
    def testCreateJob(self)->None:
        pattern = FileEventPattern(
            "pattern",
@@ -672,7 +672,7 @@ class MeowTests(unittest.TestCase):
            }
        )

        job_dict = create_job(
        job_dict = create_job_metadata_dict(
            JOB_TYPE_PAPERMILL,
            event,
            extras={
@@ -699,7 +699,7 @@ class MeowTests(unittest.TestCase):
        self.assertIn(JOB_RULE, job_dict)
        self.assertEqual(job_dict[JOB_RULE], rule.name)
        self.assertIn(JOB_STATUS, job_dict)
        self.assertEqual(job_dict[JOB_STATUS], STATUS_QUEUED)
        self.assertEqual(job_dict[JOB_STATUS], STATUS_CREATING)
        self.assertIn(JOB_CREATE_TIME, job_dict)
        self.assertIsInstance(job_dict[JOB_CREATE_TIME], datetime)
        self.assertIn(JOB_REQUIREMENTS, job_dict)
@@ -1,38 +1,28 @@

import jsonschema
import os
import stat
import subprocess
import unittest

from multiprocessing import Pipe
from typing import Dict

from meow_base.core.meow import valid_job
from meow_base.core.vars import EVENT_TYPE, WATCHDOG_BASE, \
    EVENT_RULE, EVENT_TYPE_WATCHDOG, EVENT_PATH, SHA256, WATCHDOG_HASH, \
    JOB_ID, JOB_TYPE_PYTHON, JOB_PARAMETERS, PYTHON_FUNC, \
    JOB_STATUS, META_FILE, JOB_ERROR, PARAMS_FILE, SWEEP_STOP, SWEEP_JUMP, \
    SWEEP_START, JOB_TYPE_PAPERMILL, JOB_TYPE_BASH, \
    get_base_file, get_job_file, get_result_file
    JOB_PARAMETERS, JOB_FILE, META_FILE, SWEEP_STOP, SWEEP_JUMP, \
    SWEEP_START
from meow_base.core.rule import Rule
from meow_base.functionality.file_io import lines_to_string, make_dir, \
    read_yaml, write_file, write_notebook, write_yaml
from meow_base.functionality.file_io import read_yaml, write_notebook, \
    threadsafe_read_status
from meow_base.functionality.hashing import get_hash
from meow_base.functionality.meow import create_job, create_rules, \
    create_rule, create_watchdog_event
from meow_base.functionality.parameterisation import parameterize_bash_script
from meow_base.functionality.meow import create_rules, create_rule
from meow_base.patterns.file_event_pattern import FileEventPattern
from meow_base.recipes.bash_recipe import BashRecipe, BashHandler, \
    assemble_bash_job_script
from meow_base.recipes.bash_recipe import BashRecipe, BashHandler
from meow_base.recipes.jupyter_notebook_recipe import JupyterNotebookRecipe, \
    PapermillHandler, papermill_job_func, get_recipe_from_notebook
from meow_base.recipes.python_recipe import PythonRecipe, PythonHandler, \
    python_job_func
    PapermillHandler, get_recipe_from_notebook
from meow_base.recipes.python_recipe import PythonRecipe, PythonHandler
from shared import BAREBONES_PYTHON_SCRIPT, COMPLETE_PYTHON_SCRIPT, \
    TEST_JOB_QUEUE, TEST_MONITOR_BASE, TEST_JOB_OUTPUT, BAREBONES_NOTEBOOK, \
    APPENDING_NOTEBOOK, COMPLETE_NOTEBOOK, BAREBONES_BASH_SCRIPT, \
    COMPLETE_BASH_SCRIPT, \
    COMPLETE_NOTEBOOK, BAREBONES_BASH_SCRIPT, COMPLETE_BASH_SCRIPT, \
    setup, teardown

class JupyterNotebookTests(unittest.TestCase):
@@ -338,94 +328,6 @@ class PapermillHandlerTests(unittest.TestCase):
            values.remove(val)
        self.assertEqual(len(values), 0)

    # Test jobFunc performs as expected
    def testJobFunc(self)->None:
        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output", "test")

        with open(file_path, "w") as f:
            f.write("Data")

        file_hash = get_hash(file_path, SHA256)

        pattern = FileEventPattern(
            "pattern",
            file_path,
            "recipe_one",
            "infile",
            parameters={
                "extra":"A line from a test Pattern",
                "outfile":result_path
            })
        recipe = JupyterNotebookRecipe(
            "recipe_one", APPENDING_NOTEBOOK)

        rule = create_rule(pattern, recipe)

        params_dict = {
            "extra":"extra",
            "infile":file_path,
            "outfile":result_path
        }

        job_dict = create_job(
            JOB_TYPE_PAPERMILL,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
                PYTHON_FUNC:papermill_job_func
            }
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)

        meta_file = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_file)

        param_file = os.path.join(job_dir, PARAMS_FILE)
        write_yaml(params_dict, param_file)

        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_PAPERMILL))
        write_notebook(APPENDING_NOTEBOOK, base_file)

        papermill_job_func(job_dir)

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        self.assertTrue(os.path.exists(job_dir))

        meta_path = os.path.join(job_dir, META_FILE)
        self.assertTrue(os.path.exists(meta_path))
        status = read_yaml(meta_path)
        self.assertIsInstance(status, Dict)
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], job_dict[JOB_STATUS])

        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_base_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(os.path.join(job_dir, PARAMS_FILE)))
        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_job_file(JOB_TYPE_PAPERMILL))))
        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_result_file(JOB_TYPE_PAPERMILL))))

        self.assertTrue(os.path.exists(result_path))

    # Test jobFunc doesn't execute with no args
    def testJobFuncBadArgs(self)->None:
        try:
            papermill_job_func({})
        except Exception:
            pass

        self.assertEqual(len(os.listdir(TEST_JOB_QUEUE)), 0)
        self.assertEqual(len(os.listdir(TEST_JOB_OUTPUT)), 0)

    # Test handling criteria function
    def testValidHandleCriteria(self)->None:
        ph = PapermillHandler()
@@ -840,100 +742,6 @@ class PythonHandlerTests(unittest.TestCase):
            values.remove(val)
        self.assertEqual(len(values), 0)

    # Test jobFunc performs as expected
    def testJobFunc(self)->None:
        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output")

        with open(file_path, "w") as f:
            f.write("250")

        file_hash = get_hash(file_path, SHA256)

        pattern = FileEventPattern(
            "pattern",
            file_path,
            "recipe_one",
            "infile",
            parameters={
                "extra":"A line from a test Pattern",
                "outfile": result_path
            })
        recipe = PythonRecipe(
            "recipe_one", COMPLETE_PYTHON_SCRIPT)

        rule = create_rule(pattern, recipe)

        params_dict = {
            "extra":"extra",
            "infile":file_path,
            "outfile": result_path
        }

        job_dict = create_job(
            JOB_TYPE_PYTHON,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict,
                PYTHON_FUNC:python_job_func
            }
        )

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)

        meta_file = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_file)

        param_file = os.path.join(job_dir, PARAMS_FILE)
        write_yaml(params_dict, param_file)

        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_PYTHON))
        write_notebook(APPENDING_NOTEBOOK, base_file)
        write_file(lines_to_string(COMPLETE_PYTHON_SCRIPT), base_file)

        python_job_func(job_dir)

        self.assertTrue(os.path.exists(job_dir))
        meta_path = os.path.join(job_dir, META_FILE)
        self.assertTrue(os.path.exists(meta_path))

        status = read_yaml(meta_path)
        self.assertIsInstance(status, Dict)
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], job_dict[JOB_STATUS])
        self.assertNotIn(JOB_ERROR, status)

        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_base_file(JOB_TYPE_PYTHON))))
        self.assertTrue(os.path.exists(os.path.join(job_dir, PARAMS_FILE)))
        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_job_file(JOB_TYPE_PYTHON))))
        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_result_file(JOB_TYPE_PYTHON))))

        self.assertTrue(os.path.exists(result_path))

        with open(result_path, "r") as f:
            result = f.read()

        self.assertEqual(result, "124937.5")

    # Test jobFunc doesn't execute with no args
    def testJobFuncBadArgs(self)->None:
        try:
            python_job_func({})
        except Exception:
            pass

        self.assertEqual(len(os.listdir(TEST_JOB_QUEUE)), 0)
        self.assertEqual(len(os.listdir(TEST_JOB_OUTPUT)), 0)

    # Test handling criteria function
    def testValidHandleCriteria(self)->None:
        ph = PythonHandler()
@@ -1336,97 +1144,61 @@ class BashHandlerTests(unittest.TestCase):
            values.remove(val)
        self.assertEqual(len(values), 0)

    # Test jobFunc performs as expected
    def testJobFunc(self)->None:
        file_path = os.path.join(TEST_MONITOR_BASE, "test")
        result_path = os.path.join(TEST_MONITOR_BASE, "output")
    def testJobSetup(self)->None:
        from_handler_to_runner_reader, from_handler_to_runner_writer = Pipe()
        bh = BashHandler(job_queue_dir=TEST_JOB_QUEUE)
        bh.to_runner_job = from_handler_to_runner_writer

        with open(os.path.join(TEST_MONITOR_BASE, "A"), "w") as f:
            f.write("Data")

        with open(file_path, "w") as f:
            f.write("250")

        file_hash = get_hash(file_path, SHA256)

        pattern = FileEventPattern(
            "pattern",
            file_path,
            "recipe_one",
            "infile",
            parameters={
                "extra":"A line from a test Pattern",
                "outfile": result_path
            })
        pattern_one = FileEventPattern(
            "pattern_one", "A", "recipe_one", "file_one")
        recipe = BashRecipe(
            "recipe_one", COMPLETE_BASH_SCRIPT)

        rule = create_rule(pattern, recipe)

        params_dict = {
            "extra":"extra",
            "infile":file_path,
            "outfile": result_path
        patterns = {
            pattern_one.name: pattern_one,
        }
        recipes = {
            recipe.name: recipe,
        }

        job_dict = create_job(
            JOB_TYPE_BASH,
            create_watchdog_event(
                file_path,
                rule,
                TEST_MONITOR_BASE,
                file_hash
            ),
            extras={
                JOB_PARAMETERS:params_dict
            }
        )
        rules = create_rules(patterns, recipes)
        self.assertEqual(len(rules), 1)
        _, rule = rules.popitem()
        self.assertIsInstance(rule, Rule)

        job_dir = os.path.join(TEST_JOB_QUEUE, job_dict[JOB_ID])
        make_dir(job_dir)
        self.assertEqual(len(os.listdir(TEST_JOB_OUTPUT)), 0)

        meta_file = os.path.join(job_dir, META_FILE)
        write_yaml(job_dict, meta_file)
        event = {
            EVENT_TYPE: EVENT_TYPE_WATCHDOG,
            EVENT_PATH: os.path.join(TEST_MONITOR_BASE, "A"),
            WATCHDOG_BASE: TEST_MONITOR_BASE,
            EVENT_RULE: rule,
            WATCHDOG_HASH: get_hash(
                os.path.join(TEST_MONITOR_BASE, "A"), SHA256
            )
        }

        base_script = parameterize_bash_script(
            COMPLETE_BASH_SCRIPT, params_dict
        )
        base_file = os.path.join(job_dir, get_base_file(JOB_TYPE_BASH))
        write_file(lines_to_string(base_script), base_file)
        st = os.stat(base_file)
        os.chmod(base_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        params_dict = {
            "file_one": os.path.join(TEST_MONITOR_BASE, "A")
        }

        job_script = assemble_bash_job_script()
        job_file = os.path.join(job_dir, get_job_file(JOB_TYPE_BASH))
        write_file(lines_to_string(job_script), job_file)
        st = os.stat(job_file)
        os.chmod(job_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        bh.setup_job(event, params_dict)

        print(os.listdir(job_dir))
        print(os.getcwd())

        result = subprocess.call(job_file, cwd=".")

        self.assertEqual(result, 0)
        if from_handler_to_runner_reader.poll(3):
            job_dir = from_handler_to_runner_reader.recv()

        self.assertIsInstance(job_dir, str)
        self.assertTrue(os.path.exists(job_dir))
        meta_path = os.path.join(job_dir, META_FILE)
        self.assertTrue(os.path.exists(meta_path))

        status = read_yaml(meta_path)
        self.assertIsInstance(status, Dict)
        self.assertIn(JOB_STATUS, status)
        self.assertEqual(status[JOB_STATUS], job_dict[JOB_STATUS])
        self.assertNotIn(JOB_ERROR, status)
        self.assertTrue(len(os.listdir(job_dir)), 3)
        for f in [META_FILE, "recipe.sh", JOB_FILE]:
            self.assertTrue(os.path.exists(os.path.join(job_dir, f)))

        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_base_file(JOB_TYPE_BASH))))
        self.assertTrue(os.path.exists(
            os.path.join(job_dir, get_job_file(JOB_TYPE_BASH))))

        self.assertTrue(os.path.exists(result_path))

        with open(result_path, "r") as f:
            result = f.read()

        self.assertEqual(result, "124937\n")
        job = threadsafe_read_status(os.path.join(job_dir, META_FILE))
        valid_job(job)

    # Test handling criteria function
    def testValidHandleCriteria(self)->None:
@@ -1,5 +1,4 @@

import io
import importlib
import os
import unittest
@@ -7,7 +6,6 @@ import unittest
from multiprocessing import Pipe
from random import shuffle
from shutil import copy
from time import sleep
from warnings import warn

from meow_base.core.base_conductor import BaseConductor
@@ -15,7 +13,7 @@ from meow_base.core.base_handler import BaseHandler
from meow_base.core.base_monitor import BaseMonitor
from meow_base.conductors import LocalPythonConductor
from meow_base.core.vars import JOB_TYPE_PAPERMILL, JOB_ERROR, \
    META_FILE, JOB_TYPE_PYTHON, JOB_CREATE_TIME, get_result_file
    META_FILE, JOB_TYPE_PYTHON, JOB_CREATE_TIME
from meow_base.core.runner import MeowRunner
from meow_base.functionality.file_io import make_dir, read_file, \
    read_notebook, read_yaml, write_file, lines_to_string
@@ -372,7 +370,7 @@ class MeowTests(unittest.TestCase):
            f.write("Initial Data")

        loops = 0
        while loops < 5:
        while loops < 10:
            # Initial prompt
            if conductor_to_test_test.poll(5):
                msg = conductor_to_test_test.recv()
@@ -396,7 +394,7 @@ class MeowTests(unittest.TestCase):
                else:
                    raise Exception("Timed out")
                self.assertEqual(msg, 1)
                loops = 5
                loops = 10

            loops += 1

@@ -409,10 +407,10 @@ class MeowTests(unittest.TestCase):
        runner.stop()

        print(os.listdir(job_dir))
        self.assertEqual(count_non_locks(job_dir), 5)
        self.assertEqual(count_non_locks(job_dir), 4)

        result = read_notebook(
            os.path.join(job_dir, get_result_file(JOB_TYPE_PAPERMILL)))
            os.path.join(job_dir, "result.ipynb"))
        self.assertIsNotNone(result)

        output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
@@ -522,10 +520,10 @@ class MeowTests(unittest.TestCase):
        self.assertIn(job_ids[1], os.listdir(TEST_JOB_OUTPUT))

        mid_job_dir = os.path.join(TEST_JOB_OUTPUT, job_ids[0])
        self.assertEqual(count_non_locks(mid_job_dir), 5)
        self.assertEqual(count_non_locks(mid_job_dir), 4)

        result = read_notebook(
            os.path.join(mid_job_dir, get_result_file(JOB_TYPE_PAPERMILL)))
            os.path.join(mid_job_dir, "result.ipynb"))
        self.assertIsNotNone(result)

        mid_output_path = os.path.join(TEST_MONITOR_BASE, "middle", "A.txt")
@@ -537,10 +535,10 @@ class MeowTests(unittest.TestCase):
        self.assertEqual(data, "Initial Data\nA line from Pattern 1")

        final_job_dir = os.path.join(TEST_JOB_OUTPUT, job_ids[1])
        self.assertEqual(count_non_locks(final_job_dir), 5)
        self.assertEqual(count_non_locks(final_job_dir), 4)

        result = read_notebook(os.path.join(final_job_dir,
            get_result_file(JOB_TYPE_PAPERMILL)))
            "result.ipynb"))
        self.assertIsNotNone(result)

        final_output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
@@ -651,11 +649,11 @@ class MeowTests(unittest.TestCase):

        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(job_dir, get_result_file(JOB_TYPE_PYTHON))
        result_path = os.path.join(job_dir, "output.log")
        self.assertTrue(os.path.exists(result_path))
        result = read_file(os.path.join(result_path))
        self.assertEqual(
            result, "--STDOUT--\n12505000.0\ndone\n\n\n--STDERR--\n\n")
            result, "12505000.0\ndone\n")

        output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
        self.assertTrue(os.path.exists(output_path))
@@ -779,18 +777,18 @@ class MeowTests(unittest.TestCase):
        final_job_id = job_ids[0]

        mid_job_dir = os.path.join(TEST_JOB_OUTPUT, mid_job_id)
        self.assertEqual(count_non_locks(mid_job_dir), 5)
        self.assertEqual(count_non_locks(mid_job_dir), 4)

        mid_metafile = os.path.join(mid_job_dir, META_FILE)
        mid_status = read_yaml(mid_metafile)
        self.assertNotIn(JOB_ERROR, mid_status)

        mid_result_path = os.path.join(
            mid_job_dir, get_result_file(JOB_TYPE_PYTHON))
            mid_job_dir, "output.log")
        self.assertTrue(os.path.exists(mid_result_path))
        mid_result = read_file(os.path.join(mid_result_path))
        self.assertEqual(
            mid_result, "--STDOUT--\n7806.25\ndone\n\n\n--STDERR--\n\n")
            mid_result, "7806.25\ndone\n")

        mid_output_path = os.path.join(TEST_MONITOR_BASE, "middle", "A.txt")
        self.assertTrue(os.path.exists(mid_output_path))
@@ -798,17 +796,17 @@ class MeowTests(unittest.TestCase):
        self.assertEqual(mid_output, "7806.25")

        final_job_dir = os.path.join(TEST_JOB_OUTPUT, final_job_id)
        self.assertEqual(count_non_locks(final_job_dir), 5)
        self.assertEqual(count_non_locks(final_job_dir), 4)

        final_metafile = os.path.join(final_job_dir, META_FILE)
        final_status = read_yaml(final_metafile)
        self.assertNotIn(JOB_ERROR, final_status)

        final_result_path = os.path.join(final_job_dir, get_result_file(JOB_TYPE_PYTHON))
        final_result_path = os.path.join(final_job_dir, "output.log")
        self.assertTrue(os.path.exists(final_result_path))
        final_result = read_file(os.path.join(final_result_path))
        self.assertEqual(
            final_result, "--STDOUT--\n2146.5625\ndone\n\n\n--STDERR--\n\n")
            final_result, "2146.5625\ndone\n")

        final_output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
        self.assertTrue(os.path.exists(final_output_path))
@@ -916,7 +914,7 @@ class MeowTests(unittest.TestCase):

        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(job_dir, get_result_file(JOB_TYPE_PYTHON))
        result_path = os.path.join(job_dir, "output.log")
        self.assertTrue(os.path.exists(result_path))

        output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
@@ -1091,11 +1089,11 @@ class MeowTests(unittest.TestCase):

        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(job_dir, get_result_file(JOB_TYPE_PYTHON))
        result_path = os.path.join(job_dir, "output.log")
        self.assertTrue(os.path.exists(result_path))
        result = read_file(os.path.join(result_path))
        self.assertEqual(
            result, "--STDOUT--\n12505000.0\ndone\n\n\n--STDERR--\n\n")
            result, "12505000.0\ndone\n")

        output_path = os.path.join(TEST_MONITOR_BASE, "output", "A.txt")
        self.assertTrue(os.path.exists(output_path))
@@ -1281,7 +1279,7 @@ class MeowTests(unittest.TestCase):
        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(
            TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
            TEST_JOB_OUTPUT, job_dir, "result.ipynb")
        self.assertTrue(os.path.exists(result_path))

    # Test some actual scientific analysis, in a predicatable loop
@@ -1426,7 +1424,7 @@ class MeowTests(unittest.TestCase):
        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(
            TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
            TEST_JOB_OUTPUT, job_dir, "result.ipynb")
        self.assertTrue(os.path.exists(result_path))

        results = len(os.listdir(
@@ -1591,7 +1589,7 @@ class MeowTests(unittest.TestCase):
        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(
            TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
            TEST_JOB_OUTPUT, job_dir, "result.ipynb")
        self.assertTrue(os.path.exists(result_path))

        outputs = len(os.listdir(TEST_JOB_OUTPUT))
@@ -1761,7 +1759,7 @@ class MeowTests(unittest.TestCase):
        self.assertNotIn(JOB_ERROR, status)

        result_path = os.path.join(
            TEST_JOB_OUTPUT, job_dir, get_result_file(JOB_TYPE_PAPERMILL))
            TEST_JOB_OUTPUT, job_dir, "result.ipynb")
        self.assertTrue(os.path.exists(result_path))

        outputs = len(os.listdir(TEST_JOB_OUTPUT))