Merge branch 'release/v0.2.7'
vogt31337 committed Sep 7, 2023
2 parents 4e41d94 + a7e4642 · commit df91bbe
Showing 9 changed files with 90 additions and 35 deletions.
35 changes: 35 additions & 0 deletions .github/workflows/release.example
@@ -0,0 +1,35 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+# GitHub recommends pinning actions to a commit SHA.
+# To get a newer version, you will need to update the SHA.
+# You can also reference a tag or branch, but the action may change without warning.
+
+name: Upload Python Package
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install build
+      - name: Build package
+        run: python -m build
+      - name: Publish package
+        uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
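
Note: the workflow above mirrors the standard PyPA publishing template: build the distribution with python -m build, then hand dist/ to the pinned pypa/gh-action-pypi-publish action. A minimal local dry run of the same build-and-check sequence, as a hypothetical helper script that is not part of the repository (assumes the build and twine packages are installable):

    import glob
    import subprocess
    import sys

    # install the same tooling the workflows rely on
    subprocess.run([sys.executable, "-m", "pip", "install", "--upgrade", "build", "twine"], check=True)
    # build sdist and wheel into dist/, exactly what the "Build package" step runs
    subprocess.run([sys.executable, "-m", "build"], check=True)
    # validate the built metadata before anything gets published
    subprocess.run([sys.executable, "-m", "twine", "check", *glob.glob("dist/*")], check=True)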
8 changes: 4 additions & 4 deletions .github/workflows/release.yml
@@ -46,10 +46,10 @@ jobs:
        run: |
          python3 setup.py sdist --formats=zip
          twine check dist/* --strict
-         python3 -m twine upload dist/*
+         python3 -m twine upload dist/* --verbose
        env:
          TWINE_USERNAME: __token__
-         TWINE_PASSWORD: ${{ secrets.TESTPYPI }}
+         TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
          TWINE_REPOSITORY: testpypi

      # Upload to PyPI
@@ -58,10 +58,10 @@ jobs:
        run: |
          python3 setup.py sdist --formats=zip
          twine check dist/* --strict
-         python3 -m twine upload dist/*
+         python3 -m twine upload dist/* --verbose
        env:
          TWINE_USERNAME: __token__
-         TWINE_PASSWORD: ${{ secrets.PYPI }}
+         TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
          TWINE_REPOSITORY: pypi
      # Make sure package is available on pypi
      - name: Sleep for 300s to make release available
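
Note: release.yml stages every release through TestPyPI (TWINE_REPOSITORY: testpypi) before uploading to PyPI, then sleeps 300 s so the upload has time to become visible. A hedged alternative to the fixed sleep is to poll the PyPI JSON API until the new version appears; a sketch, assuming the requests package is available:

    import time

    import requests

    def wait_for_release(package: str, version: str, timeout: float = 600.0) -> bool:
        # poll https://pypi.org/pypi/<package>/json until `version` is listed
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            resp = requests.get(f"https://pypi.org/pypi/{package}/json", timeout=10)
            if resp.ok and version in resp.json().get("releases", {}):
                return True
            time.sleep(15)
        return False

    wait_for_release("pandahub", "0.2.7")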
2 changes: 1 addition & 1 deletion pandahub/__init__.py
@@ -1,4 +1,4 @@
__version__ = "0.2.6"
__version__ = "0.2.7"

from pandahub.lib.PandaHub import PandaHub, PandaHubError
from pandahub.client.PandaHubClient import PandaHubClient
1 change: 1 addition & 0 deletions pandahub/lib/PandaHub.py
@@ -1421,6 +1421,7 @@ def bulk_write_timeseries_to_db(self, timeseries, data_type,
                                         data_type,
                                         ts_format=ts_format,
                                         compress_ts_data=compress_ts_data,
+                                        element_index=col,
                                         **args)
            documents.append(doc)
        self.bulk_write_to_db(documents, collection_name=collection_name,
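
Note: the added element_index=col stores each DataFrame column label in its bulk-written timeseries document; without it, a later bulk_get_timeseries_from_db(..., pivot_by_column="element_index") has nothing to pivot on. A self-contained illustration of that write/pivot round trip in plain pandas (an illustrative stand-in, not the real MongoDB layer):

    import pandas as pd

    frame = pd.DataFrame({0: [1.0, 2.0], 1: [3.0, 4.0]},
                         index=pd.date_range("2020-01-01", periods=2, freq="15min"))

    # "write": one flat record per (timestamp, column), carrying the column
    # label as element_index -- the field this commit starts filling in
    records = [{"timestamp": ts, "element_index": col, "value": frame.at[ts, col]}
               for col in frame.columns for ts in frame.index]

    # "read": pivoting by element_index restores the original column layout
    restored = (pd.DataFrame(records)
                .pivot(index="timestamp", columns="element_index", values="value"))
    assert (restored == frame).all().all()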
5 changes: 3 additions & 2 deletions pandahub/lib/database_toolbox.py
@@ -184,11 +184,12 @@ def create_timeseries_document(timeseries,
"compressed_ts_data": compress_ts_data}
document = add_timestamp_info_to_document(document, timeseries, ts_format)
document = {**document, **kwargs}

if not "_id" in document: # IDs set by users will not be overwritten
document["_id"] = get_document_hash(document)

if compress_ts_data:
document["timeseries_data"] = compress_timeseries_data(timeseries,
ts_format)
document["timeseries_data"] = compress_timeseries_data(timeseries, ts_format)
else:
if ts_format == "timestamp_value":
document["timeseries_data"] = convert_timeseries_to_subdocuments(timeseries)
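
Note: for documents without a user-supplied _id, create_timeseries_document derives one from the document content via get_document_hash, so writing the same timeseries metadata twice addresses the same document instead of duplicating it. A hypothetical sketch of such a content hash (the actual implementation in pandahub may differ):

    import hashlib
    import json

    def document_hash(document: dict) -> str:
        # sort keys so the hash is independent of dict insertion order;
        # default=str coarsely handles timestamps and other non-JSON types
        serialized = json.dumps(document, sort_keys=True, default=str)
        return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

    doc = {"element_type": "load", "element_index": 3, "data_type": "p_mw"}
    doc["_id"] = document_hash(doc)  # only set when the user supplied no _id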
1 change: 1 addition & 0 deletions pandahub/test/test_client.py
@@ -6,6 +6,7 @@

def test_client_io(phc: PandaHubClient):
    phc.set_active_project("Manderbach")
+
    net = nw.mv_oberrhein()
    phc.write_network_to_db(net, name='mv_oberrhein', overwrite=True)
    net_loaded = phc.get_net_from_db(name='mv_oberrhein')
18 changes: 18 additions & 0 deletions pandahub/test/test_networks.py
@@ -8,6 +8,24 @@
from pandapipes.toolbox import nets_equal


+def test_additional_res_tables(ph):
+    import pandas as pd
+    ph.set_active_project("pytest")
+
+    # reset project aka delete everything
+    db = ph._get_project_database()
+    for cname in db.list_collection_names():
+        db.drop_collection(cname)
+
+    net1 = pp.create_empty_network()
+    net1['res_test'] = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
+    ph.write_network_to_db(net1, 'test')
+    net2 = ph.get_net_from_db('test')
+
+    assert('res_test' in net2)
+    assert(net1.res_test.shape == (2, 2))
+
+
def test_network_io(ph):
    ph.set_active_project("pytest")
    # reset project aka delete everything
53 changes: 26 additions & 27 deletions pandahub/test/test_timeseries.py
@@ -13,8 +13,8 @@
def test_from_tutorial(ph):
    ph.set_active_project(project)
    net = nw.simple_mv_open_ring_net()
-    p_mw_profiles = np.random.randint(low=0, high=100, size=(35040, len(net.load))) / 100 * net.load.p_mw.values
-    q_mvar_profiles = np.ones((35040, len(net.load)))
+    p_mw_profiles = np.random.randint(low=0, high=100, size=(35041, len(net.load))) / 100 * net.load.p_mw.values
+    q_mvar_profiles = np.ones((35041, len(net.load)))
    timestamps = pd.date_range(start="01/01/2020", end="31/12/2020", freq="15min")
    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps)
    weekindex = p_mw_profiles.index[0:(7 * 96)]
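
Note: the bump from 35040 to 35041 rows matches the inclusive date_range directly below it: 2020-01-01 00:00 through 2020-12-31 00:00 in 15-minute steps is 365 * 96 + 1 = 35041 stamps, which also explains the updated assertions in the next hunk (70082 = 2 * 35041 values, 672 = 7 * 96 rows per week). A quick check:

    import pandas as pd

    timestamps = pd.date_range(start="01/01/2020", end="31/12/2020", freq="15min")
    assert len(timestamps) == 365 * 96 + 1 == 35041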
@@ -45,10 +45,10 @@ def test_from_tutorial(ph):
                                                             datetime.datetime(2020, 1, 8, 0, 0)))

    assert (result.keys() == ['p_mw', 'q_mvar']).all()
-    assert result.size == 70080
+    assert result.size == 70082
    assert result.index.dtype == "<M8[ns]"
    assert np.isclose(result.p_mw.sum(), p_mw_profiles.sum()[0])
-    assert len(week) == 671
+    assert len(week) == 672
    # 17348.07


def test_simbench_sinlge_ts(ph):
@@ -141,28 +141,27 @@ def test_del_single_ts_on_db(ph):
def test_bulk_ts_on_db(ph):
    ph.set_active_project(project)
    # write bulk cts to db
-    p_mw_profiles = np.random.randint(low=0, high=100, size=(35041, 10))
+    p_mw_profiles = np.random.randint(low=0, high=100, size=(35041, 10)).astype(float)
    timestamps = pd.date_range(start="01/01/2020", end="31/12/2020", freq="15min")
    p_mw_profiles_no_time = pd.DataFrame(copy.deepcopy(p_mw_profiles))
-    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps).convert_dtypes(int)
-    ph.bulk_write_timeseries_to_db(p_mw_profiles, element_type="load", data_type="p_mw",
-                                   netname="bulk_write_net", collection_name="test_collection")
+    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps)
+
+    ph.bulk_write_timeseries_to_db(p_mw_profiles,
+                                   element_type="load",
+                                   data_type="p_mw",
+                                   netname="bulk_write_net",
+                                   collection_name="test_collection")

-    result = ph.bulk_get_timeseries_from_db({"netname": 'bulk_write_net',
-                                             "element_type": "load",
-                                             },
-                                            collection_name="test_collection",
-                                            pivot_by_column="element_index"
-                                            )
+    result = ph.bulk_get_timeseries_from_db({"netname": 'bulk_write_net', "element_type": "load",},
+                                            collection_name="test_collection",
+                                            pivot_by_column="element_index")

-    bulk_week = ph.bulk_get_timeseries_from_db({"netname": 'bulk_write_net',
-                                                "element_type": "load",
-                                                }, collection_name="test_collection"
-                                               , pivot_by_column="element_index",
-                                               timestamp_range=(datetime.datetime(2020, 1, 1, 0, 0),
+    bulk_week = ph.bulk_get_timeseries_from_db({"netname": 'bulk_write_net', "element_type": "load",},
+                                               collection_name="test_collection",
+                                               pivot_by_column="element_index",
+                                               timestamp_range=(datetime.datetime(2020, 1, 1, 0, 0),
                                                                datetime.datetime(2020, 1, 8, 0, 0)))

Expand All @@ -176,8 +175,8 @@ def test_bulk_ts_on_db(ph):
                                                     pivot_by_column="element_index"
                                                     )

-    assert bulk_week.size == 6710
-    assert result_no_time.size == 350400
+    assert bulk_week.size == 6720
+    assert result_no_time.size == 350410
    assert result.sum().sum() == p_mw_profiles.sum().sum()
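
Note: the corrected bulk sizes are the same arithmetic scaled to 10 profiles: a week of 15-minute values is 7 * 96 * 10 = 6720 cells, and the full untimed frame is 35041 * 10 = 350410.

    assert 7 * 96 * 10 == 6720 and 35041 * 10 == 350410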


@@ -196,10 +195,10 @@

def test_add_metadata(ph):
    ph.set_active_project(project)
-    p_mw_profiles = np.random.randint(low=0, high=100, size=(25, 10))
+    p_mw_profiles = np.random.randint(low=0, high=100, size=(25, 10)).astype(float)
    timestamps = pd.date_range(start="01/01/2020", end="01/02/2020",
                               freq="60min")
-    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps).convert_dtypes(int)
+    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps)
    ph.write_timeseries_to_db(p_mw_profiles[0],
                              netname="test_add_metadata",
                              element_index=0,
@@ -227,14 +226,14 @@ def test_add_metadata(ph):
    # check for new metadata
    meta_after = ph.get_timeseries_metadata(filter, collection_name="test_collection")

-    assert len(meta_after.columns) == 12
-    assert len(meta_before.columns) == 11
+    assert len(meta_after.columns) == 13
+    assert len(meta_before.columns) == 12

def test_bulk_write_with_meta(ph):
    ph.set_active_project(project)
-    p_mw_profiles = np.random.randint(low=0, high=100, size=(97, 10))
+    p_mw_profiles = np.random.randint(low=0, high=100, size=(97, 10)).astype(float)
    timestamps = pd.date_range(start="01/01/2020", end="01/02/2020", freq="15min")
-    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps).convert_dtypes(int)
+    p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps)
    meta = pd.DataFrame(p_mw_profiles.max(), columns=["max"], dtype=object)

    ph.bulk_write_timeseries_to_db(p_mw_profiles, element_type="meta_test_load", data_type="p_mw",
2 changes: 1 addition & 1 deletion setup.py
@@ -37,7 +37,7 @@
    name='pandahub',
    packages=find_packages(),
    url='https://github.com/e2nIEE/pandahub',
-   version='0.2.6',
+   version='0.2.7',
    include_package_data=True,
    long_description_content_type='text/markdown',
    zip_safe=False,
