
Commit

wip
rdmolony committed Jan 8, 2024
1 parent 68218cc commit dfd9b4d
Showing 14 changed files with 260 additions and 47 deletions.
6 changes: 6 additions & 0 deletions sensor/api/serializers.py
@@ -1,9 +1,15 @@
from rest_framework import serializers

from ..models import File
from ..models import FileType


class FileSerializer(serializers.ModelSerializer):

type = serializers.SlugRelatedField(
slug_field="name", queryset=FileType.objects.all()
)

class Meta:
model = File
fields = '__all__'
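
The SlugRelatedField added here lets API clients refer to a FileType by its human-readable name instead of its primary key. A minimal sketch of the request this enables, assuming a FileType named "type" already exists and assuming the file list is served at /api/sensor/files/ (the exact path is not shown in this diff; the tests below resolve it with reverse("api:sensor:file-list")):

from django.core.files.uploadedfile import SimpleUploadedFile
from rest_framework.test import APIClient

client = APIClient()
payload = SimpleUploadedFile(
    "sensor-readings.txt",
    b"YYYYMMDD HHMM M(m/s)\n20151222 0000 20.54\n",
)
response = client.post(
    "/api/sensor/files/",                # assumed path for the `api:sensor:file-list` route
    {"file": payload, "type": "type"},   # "type" is resolved via FileType.objects.get(name="type")
    format="multipart",
)
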
3 changes: 0 additions & 3 deletions sensor/api/viewsets.py
@@ -5,8 +5,5 @@


class FileViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list` and `retrieve` actions.
"""
queryset = File.objects.all()
serializer_class = FileSerializer
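
FileViewSet leaves URL generation to a DRF router; the `api:sensor:file-list` name used by api_root and by the tests further down is the list route a router derives from this viewset. A rough sketch of the assumed wiring, since the real sensor/api_urls.py is only partially shown below:

from rest_framework.routers import DefaultRouter

from sensor.api.viewsets import FileViewSet

router = DefaultRouter()
router.register(r"files", FileViewSet, basename="file")   # produces the "file-list" and "file-detail" URL names
urlpatterns = router.urls
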
1 change: 0 additions & 1 deletion sensor/api_urls.py
@@ -26,7 +26,6 @@
def api_root(request, format=None):
return Response({
'files': reverse('api:sensor:file-list', request=request, format=format),
'echo': reverse('api:sensor:echo', request=request, format=format),
})


3 changes: 2 additions & 1 deletion sensor/io.py
@@ -28,9 +28,10 @@ def validate_datetime_fieldnames_in_lines(
datetime_fieldnames: typing.Iterable[str],
) -> None:

split_lines = yield_split_lines(lines=lines, encoding=encoding, delimiter=delimiter)
fieldnames = None

for line in yield_split_lines(lines=lines, encoding=encoding, delimiter=delimiter):
for line in split_lines:
if set(datetime_fieldnames).issubset(set(line)):
fieldnames = line
break
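
This change only binds the generator to a name before looping over it; the behaviour of validate_datetime_fieldnames_in_lines is unchanged. For orientation, a hypothetical sketch of what yield_split_lines might do, inferred purely from how it is called here — the real implementation lives in sensor/io.py and is not part of this diff:

import re
import typing


def yield_split_lines(
    lines: typing.Iterable[bytes], encoding: str, delimiter: str
) -> typing.Iterator[list[str]]:
    # Assumed behaviour: decode each raw line, skip blanks, and split fields on the regex delimiter (e.g. r"\s+")
    for raw in lines:
        decoded = raw.decode(encoding).strip()
        if decoded:
            yield re.split(delimiter, decoded)
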
35 changes: 17 additions & 18 deletions sensor/models.py
@@ -7,6 +7,7 @@
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db import transaction
from django.core.exceptions import ValidationError

from .io import validate_datetime_fieldnames_in_lines
from .io import yield_readings_in_narrow_format
@@ -101,31 +102,30 @@ class File(models.Model):
hash = models.TextField(blank=True, null=True)

def clean(self):
# NOTE: automatically called by Django Forms & DRF Serializer Validate Method
with self.file.open(mode="rb") as f:
validate_datetime_fieldnames_in_lines(
lines=f,
encoding=self.type.encoding,
delimiter=self.type.delimiter,
datetime_fieldnames=self.type.datetime_fieldnames,
)

def import_to_db(self):
if self.type is None:
raise ValidationError("File type must be specified!")

if not self.type:
message = (
"Please define this file's type"
+ " before attempting to parse it"
+ " so the file's `encoding`, `delimiter`, `datetime_fieldnames`"
+ " etc are defined!"
)
raise ValueError(message)
# NOTE: This file is automatically closed upon saving a model instance
# ... each time a file is read the file pointer must be reset to enable rereads
f = self.file.open(mode="rb")

# NOTE: automatically called by Django Forms & DRF Serializer Validate Method
validate_datetime_fieldnames_in_lines(
lines=f,
encoding=self.type.encoding,
delimiter=self.type.delimiter,
datetime_fieldnames=self.type.datetime_fieldnames,
)
self.file.seek(0)

def import_to_db(self):

with self.file.open(mode="rb") as f:

reading_objs = (
Reading(
file=self,
timestamp=r["timestamp"],
sensor_name=r["sensor_name"],
reading=r["reading"]
@@ -150,7 +150,6 @@ def import_to_db(self):
Reading.objects.bulk_create(batch, batch_size)

except Exception as e:
breakpoint()
self.parsed_at = None
self.parse_error = str(e)
self.save()
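
File.clean() now carries the header validation, so both Django forms (which call it via full_clean()) and the DRF serializer can reject a file whose datetime fieldnames are missing before any readings are imported. A minimal illustration of that path, reusing the FileType values from the test fixtures below; whether the instance needs saving before clean() can open the file depends on the storage backend:

from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile

from sensor.models import File
from sensor.models import FileType

file_type = FileType.objects.create(
    name="type",
    encoding="utf-8",
    delimiter=r"\s+",
    datetime_fieldnames=["YYYYMMDD", "HHMM"],
    datetime_formats=[r"%Y%m%d %H%M"],
    na_values=["NAN"],
)
invalid = File(
    file=SimpleUploadedFile("sensor-readings.txt", b"I am invalid!"),
    type=file_type,
)
try:
    invalid.clean()
except ValidationError as e:
    print(e.messages)   # e.g. ["No `datetime_fieldnames` ['YYYYMMDD', 'HHMM'] found!"]
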
4 changes: 2 additions & 2 deletions sensor/views.py
@@ -12,7 +12,7 @@ def create_file_type(request):
form.save()
return HttpResponse("File type was created")
else:
return HttpResponse("File type creation failed")
return HttpResponse(f"File type creation failed: {form.errors}")
else:
form = FileTypeForm()
return render(request, "create_file_type.html", {"form": form})
@@ -25,7 +25,7 @@ def upload_file(request):
form.save()
return HttpResponse("File upload was successful")
else:
return HttpResponse("File upload failed")
return HttpResponse(f"File type creation failed: {form.errors}")
else:
form = FileForm()
return render(request, "upload_file.html", {"form": form})
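
Both views now include form.errors in the failure response. The errorlist captured in the test_views snapshot below originates in File.clean(), because a ModelForm runs the model's full_clean() during is_valid(). The forms themselves are not part of this diff; an assumed sketch of FileForm's shape:

from django import forms

from sensor.models import File


class FileForm(forms.ModelForm):
    class Meta:
        model = File
        fields = ["file", "type"]   # assumed field list; only `file` and `type` appear in the upload tests
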
17 changes: 17 additions & 0 deletions tests/globals.py
@@ -0,0 +1,17 @@
SOURCES = [
{
"lines": [
b"Lat=0 Lon=0 Hub-Height=160 Timezone=00.0 Terrain-Height=0.0",
b"Computed at 100 m resolution",
b" ",
b"YYYYMMDD HHMM M(m/s) D(deg) SD(m/s) DSD(deg) Gust3s(m/s) T(C) PRE(hPa) RiNumber VertM(m/s)",
b"20151222 0000 20.54 211.0 1.22 0.3 21.00 11.9 992.8 0.15 0.18",
b"20151222 0010 21.02 212.2 2.55 0.6 21.35 11.8 992.7 0.29 -0.09",
],
"encoding": "utf-8",
"delimiter": "\s+",
"datetime_fieldnames": ["YYYYMMDD", "HHMM"],
"datetime_formats": [r"%Y%m%d %H%M"],
"na_values": ["NAN"],
}
]
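
A quick worked example of how the delimiter and datetime format above fit the sample lines — splitting one data row on the \s+ delimiter and parsing its first two fields with "%Y%m%d %H%M":

import re
from datetime import datetime

row = re.split(r"\s+", "20151222 0000 20.54 211.0 1.22".strip())
timestamp = datetime.strptime(f"{row[0]} {row[1]}", "%Y%m%d %H%M")
print(timestamp)   # 2015-12-22 00:00:00
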
5 changes: 1 addition & 4 deletions tests/sensor/__snapshots__/test_io.ambr
@@ -1,8 +1,5 @@
# serializer version: 1
# name: test_import_to_db[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0-na_values0]
<QuerySet [<Reading: M(m/s) @ 2015-12-22 00:00:00 = 20.54>, <Reading: D(deg) @ 2015-12-22 00:00:00 = 211.0>, <Reading: SD(m/s) @ 2015-12-22 00:00:00 = 1.22>, <Reading: DSD(deg) @ 2015-12-22 00:00:00 = 0.3>, <Reading: Gust3s(m/s) @ 2015-12-22 00:00:00 = 21.00>, <Reading: T(C) @ 2015-12-22 00:00:00 = 11.9>, <Reading: PRE(hPa) @ 2015-12-22 00:00:00 = 992.8>, <Reading: RiNumber @ 2015-12-22 00:00:00 = 0.15>, <Reading: VertM(m/s) @ 2015-12-22 00:00:00 = 0.18>, <Reading: @ 2015-12-22 00:00:00 = >, <Reading: M(m/s) @ 2015-12-22 00:10:00 = 21.02>, <Reading: D(deg) @ 2015-12-22 00:10:00 = 212.2>, <Reading: SD(m/s) @ 2015-12-22 00:10:00 = 2.55>, <Reading: DSD(deg) @ 2015-12-22 00:10:00 = 0.6>, <Reading: Gust3s(m/s) @ 2015-12-22 00:10:00 = 21.35>, <Reading: T(C) @ 2015-12-22 00:10:00 = 11.8>, <Reading: PRE(hPa) @ 2015-12-22 00:10:00 = 992.7>, <Reading: RiNumber @ 2015-12-22 00:10:00 = 0.29>, <Reading: VertM(m/s) @ 2015-12-22 00:10:00 = -0.09>]>
# ---
# name: test_yield_readings[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0]
# name: test_yield_readings_in_narrow_format[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0]
list([
dict({
'reading': '20.54',
4 changes: 4 additions & 0 deletions tests/sensor/__snapshots__/test_models.ambr
@@ -0,0 +1,4 @@
# serializer version: 1
# name: test_import_to_db[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0-na_values0]
<QuerySet [<Reading: M(m/s) @ 2015-12-22 00:00:00 = 20.54>, <Reading: D(deg) @ 2015-12-22 00:00:00 = 211.0>, <Reading: SD(m/s) @ 2015-12-22 00:00:00 = 1.22>, <Reading: DSD(deg) @ 2015-12-22 00:00:00 = 0.3>, <Reading: Gust3s(m/s) @ 2015-12-22 00:00:00 = 21.00>, <Reading: T(C) @ 2015-12-22 00:00:00 = 11.9>, <Reading: PRE(hPa) @ 2015-12-22 00:00:00 = 992.8>, <Reading: RiNumber @ 2015-12-22 00:00:00 = 0.15>, <Reading: VertM(m/s) @ 2015-12-22 00:00:00 = 0.18>, <Reading: @ 2015-12-22 00:00:00 = >, <Reading: M(m/s) @ 2015-12-22 00:10:00 = 21.02>, <Reading: D(deg) @ 2015-12-22 00:10:00 = 212.2>, <Reading: SD(m/s) @ 2015-12-22 00:10:00 = 2.55>, <Reading: DSD(deg) @ 2015-12-22 00:10:00 = 0.6>, <Reading: Gust3s(m/s) @ 2015-12-22 00:10:00 = 21.35>, <Reading: T(C) @ 2015-12-22 00:10:00 = 11.8>, <Reading: PRE(hPa) @ 2015-12-22 00:10:00 = 992.7>, <Reading: RiNumber @ 2015-12-22 00:10:00 = 0.29>, <Reading: VertM(m/s) @ 2015-12-22 00:10:00 = -0.09>]>
# ---
4 changes: 4 additions & 0 deletions tests/sensor/__snapshots__/test_views.ambr
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# serializer version: 1
# name: TestUploadFile.test_cannot_upload_an_invalid_file[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0-na_values0]
b'File type creation failed: <ul class="errorlist"><li>__all__<ul class="errorlist nonfield"><li>No `datetime_fieldnames` [&#x27;YYYYMMDD&#x27;, &#x27;HHMM&#x27;] found!</li></ul></li></ul>'
# ---
7 changes: 7 additions & 0 deletions tests/sensor/api/__snapshots__/test_viewsets.ambr
@@ -0,0 +1,7 @@
# serializer version: 1
# name: TestUploadFile.test_cannot_upload_a_missing_file_type[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0-na_values0]
b'{"type":["This field may not be null."]}'
# ---
# name: TestUploadFile.test_cannot_upload_an_invalid_file[lines0-utf-8-\\s+-datetime_fieldnames0-datetime_formats0-na_values0]
b'{"non_field_errors":["No `datetime_fieldnames` [\'YYYYMMDD\', \'HHMM\'] found!"]}'
# ---
112 changes: 112 additions & 0 deletions tests/sensor/api/test_viewsets.py
@@ -0,0 +1,112 @@
from http import HTTPStatus

from django.core.files.uploadedfile import SimpleUploadedFile
import pytest
from rest_framework.reverse import reverse

from sensor.models import FileType
from tests.globals import SOURCES


@pytest.mark.django_db
@pytest.mark.parametrize(
"lines,encoding,delimiter,datetime_fieldnames,datetime_formats,na_values",
[
(
source["lines"],
source["encoding"],
source["delimiter"],
source["datetime_fieldnames"],
source["datetime_formats"],
source["na_values"],
)
for source in SOURCES
]
)
class TestUploadFile():

def test_can_upload_a_valid_payload(
self,
client,
lines,
encoding,
delimiter,
datetime_fieldnames,
datetime_formats,
na_values,
):
file_type = FileType.objects.create(
name="type",
encoding=encoding,
delimiter=delimiter,
datetime_fieldnames=datetime_fieldnames,
datetime_formats=datetime_formats,
na_values=na_values,
)
file = SimpleUploadedFile(
name="sensor-readings.txt", content=b"\n".join(l for l in lines),
)
url = reverse("api:sensor:file-list")

response = client.post(
url,
{"file": file, "type": file_type.name}
)

assert response.status_code == HTTPStatus.CREATED

def test_cannot_upload_an_invalid_file(
self,
client,
lines,
encoding,
delimiter,
datetime_fieldnames,
datetime_formats,
na_values,
snapshot,
):
file_type = FileType.objects.create(
name="type",
encoding=encoding,
delimiter=delimiter,
datetime_fieldnames=datetime_fieldnames,
datetime_formats=datetime_formats,
na_values=na_values,
)
invalid_file = SimpleUploadedFile(
name="sensor-readings.txt", content=b"I am invalid!",)
url = reverse("api:sensor:file-list")

response = client.post(
url,
{"file": invalid_file, "type": file_type.name}
)

assert response.status_code == HTTPStatus.BAD_REQUEST
assert response.content == snapshot


def test_cannot_upload_a_missing_file_type(
self,
client,
lines,
encoding,
delimiter,
datetime_fieldnames,
datetime_formats,
na_values,
snapshot,
):
file = SimpleUploadedFile(
name="sensor-readings.txt", content=b"\n".join(l for l in lines),
)
url = reverse("api:sensor:file-list")

response = client.post(
url,
{"file": file, "type": ""}
)

assert response.status_code == HTTPStatus.BAD_REQUEST
assert response.content == snapshot
19 changes: 1 addition & 18 deletions tests/sensor/test_models.py
@@ -5,24 +5,7 @@
from sensor.models import FileType
from sensor.models import Reading


SOURCES = [
{
"lines": [
b"Lat=0 Lon=0 Hub-Height=160 Timezone=00.0 Terrain-Height=0.0",
b"Computed at 100 m resolution",
b" ",
b"YYYYMMDD HHMM M(m/s) D(deg) SD(m/s) DSD(deg) Gust3s(m/s) T(C) PRE(hPa) RiNumber VertM(m/s)",
b"20151222 0000 20.54 211.0 1.22 0.3 21.00 11.9 992.8 0.15 0.18",
b"20151222 0010 21.02 212.2 2.55 0.6 21.35 11.8 992.7 0.29 -0.09",
],
"encoding": "utf-8",
"delimiter": "\s+",
"datetime_fieldnames": ["YYYYMMDD", "HHMM"],
"datetime_formats": [r"%Y%m%d %H%M"],
"na_values": ["NAN"],
}
]
from tests.globals import SOURCES


@pytest.mark.django_db
