style: Linting
bmtcril committed Mar 20, 2024
1 parent 1437ffa commit d890bfd
Showing 2 changed files with 49 additions and 8 deletions.
File 1 of 2:
@@ -16,6 +16,7 @@

from django.core.management.base import BaseCommand, CommandError

# For testing we won't be able to import from edx-platform
try: # pragma: no cover
from cms.djangoapps.contentstore.views.course import create_new_course_in_store
from common.djangoapps.student.helpers import do_create_account
@@ -40,7 +41,7 @@

class LoadTest:
"""
Base class for setting up and sending events.
Runs the load test and reports results to ClickHouse.
"""

course = None
@@ -84,6 +85,9 @@ def __init__(self, num_users: int, username_prefix: str):
self.create_and_enroll_learners(num_users, username_prefix)

def create_and_enroll_learners(self, num_users, username_prefix):
"""
Create test users and enroll them in our test course.
"""
log.info(f"Creating {num_users} users prefixed with {username_prefix}.")

for _ in range(num_users):
@@ -102,6 +106,9 @@ def create_and_enroll_learners(self, num_users, username_prefix):
e.save()

def create_user(self, **user_data):
"""
Create, activate, and return a user using the edx-platform API.
"""
account_creation_form = AccountCreationForm(data=user_data, tos_required=False)

user, _, _ = do_create_account(account_creation_form)
@@ -112,6 +119,10 @@ def create_user(self, **user_data):
def trigger_events(
self, num_events: int, sleep_time: float, run_until_killed: bool
) -> None:
"""
Trigger the appropriate number of events based on configuration.
"""

if run_until_killed:
log.info(f"Creating events until killed with {sleep_time} sleep between!")
while True:
@@ -122,6 +133,9 @@ def create_and_enroll_learners(self, num_users, username_prefix):
self.trigger_event_and_sleep(sleep_time)

def trigger_event_and_sleep(self, sleep_time: float) -> None:
"""
Cause a tracking log to be emitted and sleep the specified amount of time.
"""
user = choice(self.users)
log.info(f"Triggering event for user {user.username}.")
e = CourseEnrollment.get_or_create_enrollment(
@@ -181,7 +195,7 @@ def add_arguments(self, parser: Any) -> None:

def handle(self, *args, **options):
"""
Creates users and triggers events for them as configured above.
Create users and trigger events for them as configured above.
"""
if not RUNNING_IN_PLATFORM: # pragma: no cover
raise CommandError("This command must be run in the Open edX LMS or CMS.")
File 2 of 2:
@@ -28,6 +28,9 @@


class Monitor:
"""
Manages the configuration and state of the load test monitor.
"""
def __init__(self, sleep_time: float, backend: str):
self.run_id = str(uuid.uuid4())[:6]
self.ch_url = settings.EVENT_SINK_CLICKHOUSE_BACKEND_CONFIG["url"]
@@ -52,10 +55,13 @@ def __init__(self, sleep_time: float, backend: str):
self.backend = backend

def run(self) -> None:
collect_redis_bus = True if self.backend == "redis_bus" else False
collect_celery = True if self.backend == "celery" else False
collect_kafka_bus = True if self.backend == "kafka_bus" else False
collect_vector = True if self.backend == "vector" else False
"""
Run the monitor until killed.
"""
collect_redis_bus = self.backend == "redis_bus"
collect_celery = self.backend == "celery"
collect_kafka_bus = self.backend == "kafka_bus"
collect_vector = self.backend == "vector"

while True:
start = datetime.datetime.now()
@@ -86,6 +92,9 @@ def run(self) -> None:
sleep(next_sleep)

def store_stats(self, current_stats: dict) -> None:
"""
Send the results for this iteration to ClickHouse.
"""
stats = json.dumps(current_stats)

insert = f"""INSERT INTO {self.ch_stats_table} (run_id, stats) FORMAT CSV"""
@@ -105,6 +114,9 @@ def store_stats(self, current_stats: dict) -> None:
response.raise_for_status()

def get_clickhouse_stats(self):
"""
Get the current state of ClickHouse for this iteration.
"""
select = f"""
SELECT
count(*) as ttl_count,
@@ -132,6 +144,9 @@ def get_clickhouse_stats(self):
}

def get_celery_stats(self):
"""
Get the current state of Celery for this iteration.
"""
r = redis.Redis.from_url(settings.BROKER_URL)
lms_queue = r.llen("edx.lms.core.default")
cms_queue = r.llen("edx.cms.core.default")
@@ -144,6 +159,9 @@ def get_celery_stats(self):
}

def get_redis_bus_stats(self):
"""
Get the current state of the Redis bus for this iteration.
"""
r = redis.Redis.from_url(settings.EVENT_BUS_REDIS_CONNECTION_URL)
info = r.xinfo_stream("openedx-analytics", full=True)

@@ -161,6 +179,9 @@ def get_redis_bus_stats(self):
return consumer_stats

def get_kafka_bus_stats(self):
"""
Get the current state of the Kafka bus for this iteration.
"""
if not confluent_kafka: # pragma: no cover
raise CommandError(
"Trying to monitor Kafka bus, but confluent_kafka is not installed"
@@ -222,6 +243,9 @@ def get_kafka_bus_stats(self):
return consumer_stats

def _call_vector_graphql(self):
"""
Make the actual GraphQL call to the Vector API.
"""
# FIXME: Pull this from settings
url = "http://vector:8686/graphql"
query = """
@@ -242,11 +266,14 @@ def _call_vector_graphql(self):
}
}
"""
r = requests.post(url, json={"query": query})
r = requests.post(url, json={"query": query}, timeout=10)
r.raise_for_status()
return r.json()["data"]["sinks"]["edges"][0]["node"]["metrics"]

def get_vector_stats(self):
"""
Get the current state of Vector for this iteration.
"""
metrics = self._call_vector_graphql()

# These will be null until events start arriving
@@ -293,7 +320,7 @@ def add_arguments(self, parser: Any) -> None:
help="Backend used to send events to ClickHouse",
)

def handle(self, **options):
def handle(self, *_, **options):
"""
Run the load test monitor as configured above.
"""
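
Aside from the new docstrings, the lint pass above makes two small substantive tweaks: the `True if ... else False` expressions in `run()` are collapsed to direct boolean comparisons, and the Vector GraphQL request gains an explicit timeout. A minimal standalone sketch of the boolean simplification, using an illustrative hard-coded `backend` value rather than the command's actual option parsing:

backend = "redis_bus"

# Before: a redundant conditional expression that the linter flags
collect_redis_bus = True if backend == "redis_bus" else False

# After: the comparison already yields a bool, so use it directly
collect_redis_bus = backend == "redis_bus"

assert collect_redis_bus is True

The added `timeout=10` on `requests.post` serves a similar hygiene purpose: without a timeout, a hung Vector API endpoint would block the monitor loop indefinitely.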
