Merge pull request #134 from axiomhq/arne/spring-cleaning-4
Spring cleaning #4
bahlo authored Sep 12, 2024
2 parents 37625ec + ae9221f commit abd1bc7
Showing 5 changed files with 90 additions and 88 deletions.
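
The common thread across these files is that the static helpers on the old Util class become plain module-level functions in axiom_py.util, alongside a new is_personal_token helper. For downstream code that imported Util directly, call sites change roughly as in this minimal sketch (the Point dataclass is hypothetical, used only to illustrate the import change):

    from dataclasses import dataclass
    from axiom_py.util import from_dict  # previously: from axiom_py.util import Util

    @dataclass
    class Point:  # hypothetical dataclass, not part of axiom-py
        x: int
        y: int

    # Before this commit: p = Util.from_dict(Point, {"x": 1, "y": 2})
    p = from_dict(Point, {"x": 1, "y": 2})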
10 changes: 5 additions & 5 deletions src/axiom_py/annotations.py
@@ -7,7 +7,7 @@
from dataclasses import dataclass, asdict, field
from datetime import datetime
from urllib.parse import urlencode
from .util import Util
from .util import from_dict


@dataclass
@@ -68,7 +68,7 @@ def get(self, id: str) -> Annotation:
path = "/v2/annotations/%s" % id
res = self.session.get(path)
decoded_response = res.json()
return Util.from_dict(Annotation, decoded_response)
return from_dict(Annotation, decoded_response)

def create(self, req: AnnotationCreateRequest) -> Annotation:
"""
@@ -78,7 +78,7 @@ def create(self, req: AnnotationCreateRequest) -> Annotation:
"""
path = "/v2/annotations"
res = self.session.post(path, data=ujson.dumps(asdict(req)))
annotation = Util.from_dict(Annotation, res.json())
annotation = from_dict(Annotation, res.json())
self.logger.info(f"created new annotation: {annotation.id}")
return annotation

@@ -106,7 +106,7 @@ def list(

annotations = []
for record in res.json():
ds = Util.from_dict(Annotation, record)
ds = from_dict(Annotation, record)
annotations.append(ds)

return annotations
@@ -119,7 +119,7 @@ def update(self, id: str, req: AnnotationUpdateRequest) -> Annotation:
"""
path = "/v2/annotations/%s" % id
res = self.session.put(path, data=ujson.dumps(asdict(req)))
annotation = Util.from_dict(Annotation, res.json())
annotation = from_dict(Annotation, res.json())
self.logger.info(f"updated annotation({annotation.id})")
return annotation

18 changes: 8 additions & 10 deletions src/axiom_py/client.py
@@ -5,7 +5,6 @@
import gzip
import ujson
import os
from .util import Util
from enum import Enum
from humps import decamelize
from typing import Optional, List, Dict
@@ -25,6 +24,7 @@
from .annotations import AnnotationsClient
from .users import UsersClient
from .version import __version__
from .util import from_dict, handle_json_serialization, is_personal_token


AXIOM_URL = "https://api.axiom.co"
@@ -174,7 +174,7 @@ def __init__(
self.session.headers.update({"X-Axiom-Org-Id": org_id})

self.datasets = DatasetsClient(self.session, self.logger)
self.users = UsersClient(self.session)
self.users = UsersClient(self.session, is_personal_token(token))
self.annotations = AnnotationsClient(self.session, self.logger)

# wrap shutdown hook in a lambda passing in self as a ref
@@ -213,7 +213,7 @@ def ingest(
path, data=payload, headers=headers, params=params
)
status_snake = decamelize(res.json())
return Util.from_dict(IngestStatus, status_snake)
return from_dict(IngestStatus, status_snake)

def ingest_events(
self,
@@ -228,7 +228,7 @@ def ingest_events(
"""
# encode request payload to NDJSON
content = ndjson.dumps(
events, default=Util.handle_json_serialization
events, default=handle_json_serialization
).encode("UTF-8")
gzipped = gzip.compress(content)

@@ -251,13 +251,11 @@ def query_legacy(
)

path = "/v1/datasets/%s/query" % id
payload = ujson.dumps(
asdict(query), default=Util.handle_json_serialization
)
payload = ujson.dumps(asdict(query), default=handle_json_serialization)
self.logger.debug("sending query %s" % payload)
params = self._prepare_query_options(opts)
res = self.session.post(path, data=payload, params=params)
result = Util.from_dict(QueryLegacyResult, res.json())
result = from_dict(QueryLegacyResult, res.json())
self.logger.debug(f"query result: {result}")
query_id = res.headers.get("X-Axiom-History-Query-Id")
self.logger.info(f"received query result with query_id: {query_id}")
@@ -285,12 +283,12 @@ def query(
path = "/v1/datasets/_apl"
payload = ujson.dumps(
self._prepare_apl_payload(apl, opts),
default=Util.handle_json_serialization,
default=handle_json_serialization,
)
self.logger.debug("sending query %s" % payload)
params = self._prepare_apl_options(opts)
res = self.session.post(path, data=payload, params=params)
result = Util.from_dict(QueryResult, res.json())
result = from_dict(QueryResult, res.json())
self.logger.debug(f"apl query result: {result}")
query_id = res.headers.get("X-Axiom-History-Query-Id")
self.logger.info(f"received query result with query_id: {query_id}")
10 changes: 5 additions & 5 deletions src/axiom_py/datasets.py
@@ -8,7 +8,7 @@
from typing import List
from dataclasses import dataclass, asdict, field
from datetime import timedelta
from .util import Util
from .util import from_dict


@dataclass
@@ -65,7 +65,7 @@ def get(self, id: str) -> Dataset:
path = "/v1/datasets/%s" % id
res = self.session.get(path)
decoded_response = res.json()
return Util.from_dict(Dataset, decoded_response)
return from_dict(Dataset, decoded_response)

def create(self, name: str, description: str = "") -> Dataset:
"""
@@ -85,7 +85,7 @@ def create(self, name: str, description: str = "") -> Dataset:
)
),
)
ds = Util.from_dict(Dataset, res.json())
ds = from_dict(Dataset, res.json())
self.logger.info(f"created new dataset: {ds.name}")
return ds

@@ -100,7 +100,7 @@ def get_list(self) -> List[Dataset]:

datasets = []
for record in res.json():
ds = Util.from_dict(Dataset, record)
ds = from_dict(Dataset, record)
datasets.append(ds)

return datasets
@@ -122,7 +122,7 @@ def update(self, id: str, new_description: str) -> Dataset:
)
),
)
ds = Util.from_dict(Dataset, res.json())
ds = from_dict(Dataset, res.json())
self.logger.info(
f"updated dataset({ds.name}) with new desc: {ds.description}"
)
16 changes: 12 additions & 4 deletions src/axiom_py/users.py
@@ -1,6 +1,7 @@
from .util import Util
from .util import from_dict
from dataclasses import dataclass
from requests import Session
from typing import Optional


@dataclass
@@ -22,15 +23,22 @@ class User:
class UsersClient:
"""The UsersClient is a client for the Axiom Users service."""

def __init__(self, session: Session):
has_personal_token: bool

def __init__(self, session: Session, has_personal_token: bool):
self.session = session
self.has_personal_token = has_personal_token

def current(self) -> User:
def current(self) -> Optional[User]:
"""
Get the current authenticated user.
If your token is not a personal token, this will return None.
See https://axiom.co/docs/restapi/endpoints/getCurrentUser
"""
if not self.has_personal_token:
return None

res = self.session.get("/v2/user")
user = Util.from_dict(User, res.json())
user = from_dict(User, res.json())
return user
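
With this change, UsersClient.current() short-circuits and returns None when the client was built with a token that is not a personal token. A minimal usage sketch under that assumption (the token values and the package-root Client import are placeholders, not exact credentials; the "xaat-" prefix is hypothetical):

    from axiom_py import Client  # assuming Client is exported at the package root

    personal = Client(token="xapt-...")  # personal token, starts with "xapt-"
    print(personal.users.current())      # -> User(...)

    api_only = Client(token="xaat-...")  # non-personal API token
    print(api_only.users.current())      # -> None, no request is made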
124 changes: 60 additions & 64 deletions src/axiom_py/util.py
@@ -14,67 +14,63 @@
T = TypeVar("T")


class Util:
"""A collection of helper methods."""

@classmethod
def from_dict(cls, data_class: Type[T], data) -> T:
cfg = dacite.Config(
type_hooks={
QueryKind: QueryKind,
datetime: cls.convert_string_to_datetime,
AggregationOperation: AggregationOperation,
FilterOperation: FilterOperation,
MessageCode: MessageCode,
MessagePriority: MessagePriority,
timedelta: cls.convert_string_to_timedelta,
}
)

return dacite.from_dict(data_class=data_class, data=data, config=cfg)

@classmethod
def convert_string_to_datetime(cls, val: str) -> datetime:
d = iso8601.parse_date(val)
return d

@classmethod
def convert_string_to_timedelta(cls, val: str) -> timedelta:
if val == "0":
return timedelta(seconds=0)

exp = "^([0-9]?)([a-z])$"
import re

found = re.search(exp, val)
if not found:
raise Exception(
f"failed to parse timedelta field from value {val}"
)

v = int(found.groups()[0])
unit = found.groups()[1]

if unit == "s":
return timedelta(seconds=v)
elif unit == "m":
return timedelta(minutes=v)
elif unit == "h":
return timedelta(hours=v)
elif unit == "d":
return timedelta(days=v)
else:
raise Exception(
f"failed to parse timedelta field from value {val}"
)

@classmethod
def handle_json_serialization(cls, obj):
if isinstance(obj, datetime):
return obj.isoformat("T") + "Z"
elif isinstance(obj, timedelta):
return str(obj.seconds) + "s"
elif isinstance(obj, Enum):
return obj.value
elif isinstance(obj, UUID):
return str(obj)
def _convert_string_to_datetime(val: str) -> datetime:
d = iso8601.parse_date(val)
return d


def _convert_string_to_timedelta(val: str) -> timedelta:
if val == "0":
return timedelta(seconds=0)

exp = "^([0-9]?)([a-z])$"
import re

found = re.search(exp, val)
if not found:
raise Exception(f"failed to parse timedelta field from value {val}")

v = int(found.groups()[0])
unit = found.groups()[1]

if unit == "s":
return timedelta(seconds=v)
elif unit == "m":
return timedelta(minutes=v)
elif unit == "h":
return timedelta(hours=v)
elif unit == "d":
return timedelta(days=v)
else:
raise Exception(f"failed to parse timedelta field from value {val}")


def from_dict(data_class: Type[T], data) -> T:
cfg = dacite.Config(
type_hooks={
QueryKind: QueryKind,
datetime: _convert_string_to_datetime,
AggregationOperation: AggregationOperation,
FilterOperation: FilterOperation,
MessageCode: MessageCode,
MessagePriority: MessagePriority,
timedelta: _convert_string_to_timedelta,
}
)

return dacite.from_dict(data_class=data_class, data=data, config=cfg)


def handle_json_serialization(obj):
if isinstance(obj, datetime):
return obj.isoformat("T") + "Z"
elif isinstance(obj, timedelta):
return str(obj.seconds) + "s"
elif isinstance(obj, Enum):
return obj.value
elif isinstance(obj, UUID):
return str(obj)


def is_personal_token(token: str):
return token.startswith("xapt-")
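
Taken together, the former Util methods are now importable directly from axiom_py.util. A small sketch of the module-level API after this change (Retention is a hypothetical dataclass used only to exercise the timedelta type hook; per the regex above, duration strings are limited to a single digit plus a unit, e.g. "5m"):

    from dataclasses import dataclass
    from datetime import timedelta
    from axiom_py.util import from_dict, handle_json_serialization, is_personal_token

    @dataclass
    class Retention:  # hypothetical dataclass, not part of axiom-py
        max_age: timedelta

    r = from_dict(Retention, {"max_age": "5m"})              # -> Retention(max_age=timedelta(minutes=5))
    print(handle_json_serialization(timedelta(seconds=30)))  # -> "30s"
    print(is_personal_token("xapt-1234"))                    # -> True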
