view_jobs(): adjust helper function to actually return a string #522

Merged 3 commits on Oct 31, 2024
@@ -123,7 +123,7 @@ def convert_to_str(job_records):
            )
        )

-    return job_record_str
+    return "\n".join(job_record_str)


def convert_to_obj(rows):
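For context, a minimal sketch of what this one-line fix changes: before it, the helper handed back the accumulated Python list, so callers had to iterate or join it themselves; after it, the helper returns one printable, newline-delimited string. The column names and widths below are illustrative assumptions, not the module's actual formatting.

# Minimal sketch of the fixed helper; field names and widths are assumptions,
# not the real flux-accounting column layout.
def convert_to_str(job_records):
    job_record_str = []
    job_record_str.append("{:<10} {:<10}".format("UserID", "JobID"))
    for record in job_records:
        job_record_str.append("{:<10} {:<10}".format(record["userid"], record["jobid"]))
    # The fix: collapse the list into a single newline-delimited string
    # instead of returning the list object itself.
    return "\n".join(job_record_str)

print(convert_to_str([{"userid": 1001, "jobid": 102}]))

With a string return value, the caller can simply print the result (or write it to an output file) without special-casing a list.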
src/cmd/flux-account.py (0 additions & 11 deletions)

@@ -686,17 +686,6 @@ def select_accounting_function(args, output_file, parser):
        "pop_db": "accounting.pop_db",
    }

-    if args.func == "view_job_records":
-        data["output_file"] = output_file
-        return_val = flux.Flux().rpc(func_map[args.func], data).get()
-        # the return value of view-job-records without an output file is
-        # just a list of strings, so just iterate through that list and
-        # then return
-        job_record_list = list(return_val.values())
-        for job_record in job_record_list[0]:
-            print(job_record)
-        return
-
    if args.func in func_map:
        return_val = flux.Flux().rpc(func_map[args.func], data).get()
    else:
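This deletion relies on view-job-records now returning its records as one preformatted string, so the command can fall through to the same generic RPC dispatch as every other accounting function. A simplified sketch of that generic path follows; the error handling and print step are assumptions based on the func_map entries visible in the diff, not the exact flux-account.py code.

# Simplified sketch (assumed shape) of the generic dispatch that now also
# serves view-job-records: call the RPC named in func_map and show the reply.
import flux

def dispatch_accounting_rpc(func_name, func_map, data):
    if func_name not in func_map:
        raise ValueError("unknown accounting function: " + func_name)
    return_val = flux.Flux().rpc(func_map[func_name], data).get()
    # view-job-records now returns a single newline-joined string, so no
    # per-function iteration is needed before displaying the result.
    print(return_val)
    return return_val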
t/python/t1006_job_archive.py (21 additions & 25 deletions)
@@ -32,9 +32,6 @@ def setUpClass(self):
        global cur

        # create example job-archive database, output file
-        global op
-        op = "job_records.csv"
-
        c.create_db("FluxAccountingUsers.db")
        try:
            acct_conn = sqlite3.connect("file:FluxAccountingUsers.db?mode=rw", uri=True)

@@ -147,74 +144,74 @@ def populate_job_archive_db(acct_conn, userid, bank, ranks, nodes, num_entries):
    # its job information
    def test_01_with_jobid_valid(self):
        my_dict = {"jobid": 102}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 2)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 1)

    # passing a bad jobid should return no records
    def test_02_with_jobid_failure(self):
        my_dict = {"jobid": 000}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 1)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 0)

    # passing a timestamp before the first job to
    # start should return all of the jobs
    def test_03_after_start_time_all(self):
        my_dict = {"after_start_time": 0}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 19)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 18)

    # passing a timestamp after all of the start time
-    # of all the completed jobs should return a failure message
+    # of all the completed jobs should return no jobs
    @mock.patch("time.time", mock.MagicMock(return_value=11000000))
    def test_04_after_start_time_none(self):
        my_dict = {"after_start_time": time.time()}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 1)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 0)

    # passing a timestamp before the end time of the
    # last job should return all of the jobs
    @mock.patch("time.time", mock.MagicMock(return_value=11000000))
    def test_05_before_end_time_all(self):
        my_dict = {"before_end_time": time.time()}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 19)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 18)

    # passing a timestamp before the end time of
    # the first completed jobs should return no jobs
    def test_06_before_end_time_none(self):
        my_dict = {"before_end_time": 0}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 1)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 0)

    # passing a user not in the jobs table
    # should return no jobs
    def test_07_by_user_failure(self):
        my_dict = {"user": "9999"}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 1)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 0)

    # view_jobs_run_by_username() interacts with a
    # passwd file; for the purpose of these tests,
    # just pass the userid
    def test_08_by_user_success(self):
        my_dict = {"user": "1001"}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 3)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 2)

    # passing a combination of params should further
    # refine the query
    @mock.patch("time.time", mock.MagicMock(return_value=9000500))
    def test_09_multiple_params(self):
        my_dict = {"user": "1001", "after_start_time": time.time()}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 2)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 1)

    # passing no parameters will result in a generic query
    # returning all results
    def test_10_no_options_passed(self):
        my_dict = {}
-        job_records = j.view_jobs(acct_conn, op, **my_dict)
-        self.assertEqual(len(job_records), 19)
+        job_records = j.get_jobs(acct_conn, **my_dict)
+        self.assertEqual(len(job_records), 18)

    # users that have run a lot of jobs should have a larger usage factor
    @mock.patch("time.time", mock.MagicMock(return_value=9900000))
@@ -425,7 +422,6 @@ def test_20_update_job_usage_next_half_life_period(self):
    # remove database and log file
    @classmethod
    def tearDownClass(self):
-        os.remove("job_records.csv")
        os.remove("FluxAccountingUsers.db")


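The updated tests drop the output-file argument and call j.get_jobs(acct_conn, **my_dict), and every expected count shrinks by one, which is consistent with the old view_jobs() return including a formatted header row while get_jobs() yields only job records (an inference from the diff, not a statement from the PR). Below is a self-contained sketch of the keyword-filter pattern the tests exercise, using an in-memory stand-in for the real SQLite-backed get_jobs; the field names and filter semantics are assumptions for illustration.

# Stand-in for j.get_jobs(); the real function queries the job-archive table.
def get_jobs(conn, jobid=None, user=None, after_start_time=None, before_end_time=None):
    records = [
        {"jobid": 102, "userid": "1001", "t_run": 100, "t_inactive": 200},
        {"jobid": 103, "userid": "1002", "t_run": 150, "t_inactive": 250},
    ]
    if jobid is not None:
        records = [r for r in records if r["jobid"] == jobid]
    if user is not None:
        records = [r for r in records if r["userid"] == user]
    if after_start_time is not None:
        records = [r for r in records if r["t_run"] > after_start_time]
    if before_end_time is not None:
        records = [r for r in records if r["t_inactive"] < before_end_time]
    return records

my_dict = {"jobid": 102}
assert len(get_jobs(None, **my_dict)) == 1    # mirrors test_01_with_jobid_valid
assert len(get_jobs(None, user="9999")) == 0  # mirrors test_07_by_user_failure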