Skip to content

Commit

Permalink
Merge pull request #546 from RedHatInsights/use-double-quotes
Browse files Browse the repository at this point in the history
Unified quotes usage
  • Loading branch information
tisnik authored Dec 18, 2023
2 parents 1d3608d + 61f315d commit 84cf2a3
Show file tree
Hide file tree
Showing 10 changed files with 36 additions and 36 deletions.
2 changes: 1 addition & 1 deletion features/src/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def check(output: List):
if '"Version:' in line:
version = line.split("Version: ")[-1][:-2]
# we need to distinguish between semantic version and commit SHA
if version.startswith('v'):
if version.startswith("v"):
try:
semver.Version.parse(version[1:])
print(f"{version} is a valid semantic version.")
Expand Down
32 changes: 16 additions & 16 deletions features/src/version_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,24 +36,24 @@ def test_missing_version():

def test_correct_version():
"""Check how version check function handled input with proper version."""
check(["this", "is", "correct", "version", "\"Version: v1.2.3\"}"])
check(["this", "is", "correct", "version", '"Version: v1.2.3"}'])


def test_correct_sha():
"""Check how version check function handled input with proper commit SHA."""
check(["this", "is", "correct", "version",
"\"Version: abc00defabc00defabc00defabc00defabc00def\"}"])
'"Version: abc00defabc00defabc00defabc00defabc00def"}'])


improper_versions = (
"\"Version: v1\"}",
"\"Version: v1.2\"}",
"\"Version: v1.2.A\"}",
"\"Version: v1.A.3\"}",
"\"Version: vA.2.3\"}",
"\"Version: v1.2.3}",
"\"Version: v1.2.3",
"\"Version: v1.2.3\"}",
'"Version: v1"}',
'"Version: v1.2"}',
'"Version: v1.2.A"}',
'"Version: v1.A.3"}',
'"Version: vA.2.3"}',
'"Version: v1.2.3}',
'"Version: v1.2.3',
'"Version: v1.2.3"}',
)


Expand All @@ -65,12 +65,12 @@ def test_incorrect_version(version):


improper_shas = (
"\"Version abc00defabc00defabc00defabc00defabc00def\"}",
"\"Version: abc00defabc00defabc00defabc00defabc00def\"}",
"\"Version: abc00defabc00defabc00defabc00defabc00de\"}",
"\"Version: abc00defabc00defabc00defabc00defabc00def}",
"\"Version: abc00defabc00defabc00defabc00defabc00def\"",
"\"Version: abc00defabc00defabc00defabc00defabc00def",
'"Version abc00defabc00defabc00defabc00defabc00def"}',
'"Version: abc00defabc00defabc00defabc00defabc00def"}',
'"Version: abc00defabc00defabc00defabc00defabc00de"}',
'"Version: abc00defabc00defabc00defabc00defabc00def}',
'"Version: abc00defabc00defabc00defabc00defabc00def"',
'"Version: abc00defabc00defabc00defabc00defabc00def',
)


Expand Down
2 changes: 1 addition & 1 deletion features/steps/common_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def access_rest_api_endpoint_post(context, endpoint):
context.response = requests.post(url, json=data)


@then("The status message of the response is \"{expected_message}\"")
@then('The status message of the response is "{expected_message}"')
def check_status_of_response(context, expected_message):
"""Check the actual message/value in status attribute."""
assert context.response is not None, "Send request to service first"
Expand Down
2 changes: 1 addition & 1 deletion features/steps/insights_results_aggregator.py
Original file line number Diff line number Diff line change
Expand Up @@ -422,7 +422,7 @@ def send_rules_results_to_kafka(context, filename, topic):
full_path = f"{DATA_DIRECTORY}/{filename}"
with open(full_path, "r") as fin:
payload = fin.read().encode("utf-8")
if hasattr(context, 'kafka_hostname') and hasattr(context, 'kafka_port'):
if hasattr(context, "kafka_hostname") and hasattr(context, "kafka_port"):
kafka_util.send_event(f"{context.kafka_hostname}:{context.kafka_port}", topic, payload)
else:
# try localhost or raise exception
Expand Down
20 changes: 10 additions & 10 deletions features/steps/insights_sha_extractor_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@
def create_presigned_url(s3_client, bucket_name, object_name, expiration=3600):
"""Generate a presigned URL to share an S3 object."""
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_name,
'Key': object_name},
response = s3_client.generate_presigned_url("get_object",
Params={"Bucket": bucket_name,
"Key": object_name},
ExpiresIn=expiration)
except ClientError as e:
logging.error(e)
Expand All @@ -50,17 +50,17 @@ def use_real_storage(context, archive_key, msg_path):
s3_access_key = os.getenv("S3_ACCESS_KEY")
s3_secret_access_key = os.getenv("S3_SECRET_ACCESS_KEY")

s3_client = boto3.client('s3',
endpoint_url=f'http://{s3_host}:{s3_port}',
s3_client = boto3.client("s3",
endpoint_url=f"http://{s3_host}:{s3_port}",
aws_access_key_id=s3_access_key,
aws_secret_access_key=s3_secret_access_key)

try:
s3_client.head_bucket(Bucket='test')
s3_client.head_bucket(Bucket="test")
except ClientError:
s3_client.create_bucket(Bucket='test')
s3_client.create_bucket(Bucket="test")

with open(f"test_data/{archive_key}.tar.gz", 'rb') as archive:
with open(f"test_data/{archive_key}.tar.gz", "rb") as archive:
s3_client.put_object(Body=archive, Bucket="test", Key=archive_key)

topic_name = context.__dict__["_stack"][0]["incoming_topic"]
Expand All @@ -70,7 +70,7 @@ def use_real_storage(context, archive_key, msg_path):
msg = f.read().encode("utf-8")
event_data = json.loads(msg)
event_data["url"] = presigned_url
event_data = json.dumps(event_data).encode('utf-8')
event_data = json.dumps(event_data).encode("utf-8")
headers = [("service", b"testareno")]
kafka_util.send_event(context.hostname, topic_name, event_data, headers)

Expand All @@ -85,7 +85,7 @@ def use_mock_storage(context, archive_key, msg_path):
kafka_util.send_event(context.hostname, topic_name, event_data, headers)


@when('S3 and Kafka are populated with an archive {with_or_without} workload_info')
@when("S3 and Kafka are populated with an archive {with_or_without} workload_info")
def populate_s3(context, with_or_without):
"""Try to load archive to real S3 storage and publish JSON message to Kafka."""
if with_or_without == "with":
Expand Down
4 changes: 2 additions & 2 deletions features/steps/parquet_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def check_logs_table(context):
assert ok, \
f'topic {row["topic"]}, partition {row["partition"]}, ' + \
f'offset {row["offset"]}, message {row["message"]}, ' + \
'not found'
"not found"


@then('The logs should contain "{log_message}"')
Expand All @@ -174,7 +174,7 @@ def check_no_logs_table(context):
assert not ok, \
f'topic {row["topic"]}, partition {row["partition"]}, ' + \
f'offset {row["offset"]}, message {row["message"]}, ' + \
'found'
"found"


def check_logs(logs: str, topic: str, partition: int,
Expand Down
4 changes: 2 additions & 2 deletions features/steps/parquet_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ def check_parquet_table_is(context, object_name):
got_df = got_df.astype(str) # in order to have same format as in want_df

want_df = want_df.sort_values(
by=['archive_path'], ascending=False).reset_index(drop=True)
by=["archive_path"], ascending=False).reset_index(drop=True)
got_df = got_df.sort_values(
by=['archive_path'], ascending=False).reset_index(drop=True)
by=["archive_path"], ascending=False).reset_index(drop=True)

assert want_df.equals(got_df), f"Got:\n{got_df}\nwant:\n{want_df}"

Expand Down
2 changes: 1 addition & 1 deletion features/steps/pushgateway.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ def extract_labels(metric: str) -> (str, Dict):
out = {}
for pair_of_key_var in labels.split(","):
pair_of_key_var = pair_of_key_var.replace(
'"', '') # Remove additional captions
'"', "") # Remove additional captions
key, val = pair_of_key_var.split("=")
out[key] = val
return metric_name, out
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ async def render_reports(request: Request):

for cluster_id, cluster_data in data["report_data"]["reports"].items():
for report in cluster_data["reports"]:
if report["component"].endswith('.report'):
if report["component"].endswith(".report"):
report["component"] = report["component"][:-7]
reports.append(
{
Expand Down
2 changes: 1 addition & 1 deletion mocks/rhobs/rhobs_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ class Query(BaseModel):
query: str


UUID_REGEX = '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'
UUID_REGEX = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"


@app.get("/api/metrics/v1/telemeter/api/v1/query")
Expand Down

0 comments on commit 84cf2a3

Please sign in to comment.