diff --git a/.github/workflows/release.example b/.github/workflows/release.example
new file mode 100644
index 0000000..80e54d1
--- /dev/null
+++ b/.github/workflows/release.example
@@ -0,0 +1,35 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+# GitHub recommends pinning actions to a commit SHA.
+# To get a newer version, you will need to update the SHA.
+# You can also reference a tag or branch, but the action may change without warning.
+
+name: Upload Python Package
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python
+      uses: actions/setup-python@v4
+      with:
+        python-version: '3.10'
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install build
+    - name: Build package
+      run: python -m build
+    - name: Publish package
+      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+      with:
+        user: __token__
+        password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c4b05dd..6d389c6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -46,10 +46,10 @@ jobs:
         run: |
           python3 setup.py sdist --formats=zip
           twine check dist/* --strict
-          python3 -m twine upload dist/*
+          python3 -m twine upload dist/* --verbose
         env:
           TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.TESTPYPI }}
+          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
           TWINE_REPOSITORY: testpypi
 
       # Upload to PyPI
@@ -58,10 +58,10 @@ jobs:
         run: |
           python3 setup.py sdist --formats=zip
           twine check dist/* --strict
-          python3 -m twine upload dist/*
+          python3 -m twine upload dist/* --verbose
         env:
           TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI }}
+          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
           TWINE_REPOSITORY: pypi
       # Make sure package is available on pypi
       - name: Sleep for 300s to make release available
diff --git a/pandahub/__init__.py b/pandahub/__init__.py
index 2075905..1cc4a03 100644
--- a/pandahub/__init__.py
+++ b/pandahub/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.2.6"
+__version__ = "0.2.7"
 
 from pandahub.lib.PandaHub import PandaHub, PandaHubError
 from pandahub.client.PandaHubClient import PandaHubClient
diff --git a/pandahub/lib/PandaHub.py b/pandahub/lib/PandaHub.py
index 6ba86dc..1fdd9a5 100644
--- a/pandahub/lib/PandaHub.py
+++ b/pandahub/lib/PandaHub.py
@@ -1421,6 +1421,7 @@ def bulk_write_timeseries_to_db(self, timeseries, data_type,
                                              data_type,
                                              ts_format=ts_format,
                                              compress_ts_data=compress_ts_data,
+                                             element_index=col,
                                              **args)
             documents.append(doc)
         self.bulk_write_to_db(documents, collection_name=collection_name,
diff --git a/pandahub/lib/database_toolbox.py b/pandahub/lib/database_toolbox.py
index bc533ea..6cc9a86 100644
--- a/pandahub/lib/database_toolbox.py
+++ b/pandahub/lib/database_toolbox.py
@@ -184,11 +184,12 @@ def create_timeseries_document(timeseries,
                 "compressed_ts_data": compress_ts_data}
     document = add_timestamp_info_to_document(document, timeseries, ts_format)
     document = {**document, **kwargs}
+
     if not "_id" in document: # IDs set by users will not be overwritten
         document["_id"] = get_document_hash(document)
+
     if compress_ts_data:
-        document["timeseries_data"] = compress_timeseries_data(timeseries,
-                                                               ts_format)
+        document["timeseries_data"] = compress_timeseries_data(timeseries, ts_format)
     else:
         if ts_format == "timestamp_value":
             document["timeseries_data"] = convert_timeseries_to_subdocuments(timeseries)
diff --git a/pandahub/test/test_client.py b/pandahub/test/test_client.py
index 2a6a891..ddb9688 100644
--- a/pandahub/test/test_client.py
+++ b/pandahub/test/test_client.py
@@ -6,6 +6,7 @@
 
 
 def test_client_io(phc: PandaHubClient):
     phc.set_active_project("Manderbach")
+    net = nw.mv_oberrhein()
     phc.write_network_to_db(net, name='mv_oberrhein', overwrite=True)
     net_loaded = phc.get_net_from_db(name='mv_oberrhein')
diff --git a/pandahub/test/test_networks.py b/pandahub/test/test_networks.py
index 78e6f3a..da140d8 100644
--- a/pandahub/test/test_networks.py
+++ b/pandahub/test/test_networks.py
@@ -8,6 +8,24 @@
 from pandapipes.toolbox import nets_equal
 
 
+def test_additional_res_tables(ph):
+    import pandas as pd
+    ph.set_active_project("pytest")
+
+    # reset project aka delete everything
+    db = ph._get_project_database()
+    for cname in db.list_collection_names():
+        db.drop_collection(cname)
+
+    net1 = pp.create_empty_network()
+    net1['res_test'] = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
+    ph.write_network_to_db(net1, 'test')
+    net2 = ph.get_net_from_db('test')
+
+    assert('res_test' in net2)
+    assert(net1.res_test.shape == (2,2))
+
+
 def test_network_io(ph):
     ph.set_active_project("pytest")
     # reset project aka delete everything
diff --git a/pandahub/test/test_timeseries.py b/pandahub/test/test_timeseries.py
index 647e041..3a5570a 100644
--- a/pandahub/test/test_timeseries.py
+++ b/pandahub/test/test_timeseries.py
@@ -13,8 +13,8 @@ def test_from_tutorial(ph):
     ph.set_active_project(project)
     net = nw.simple_mv_open_ring_net()
-    p_mw_profiles = np.random.randint(low=0, high=100, size=(35040, len(net.load))) / 100 * net.load.p_mw.values
-    q_mvar_profiles = np.ones((35040, len(net.load)))
+    p_mw_profiles = np.random.randint(low=0, high=100, size=(35041, len(net.load))) / 100 * net.load.p_mw.values
+    q_mvar_profiles = np.ones((35041, len(net.load)))
     timestamps = pd.date_range(start="01/01/2020", end="31/12/2020", freq="15min")
     p_mw_profiles = pd.DataFrame(p_mw_profiles, index=timestamps)
     weekindex = p_mw_profiles.index[0:(7 * 96)]
@@ -45,10 +45,10 @@ def test_from_tutorial(ph):
                                            datetime.datetime(2020, 1, 8, 0, 0)))
     assert (result.keys() == ['p_mw', 'q_mvar']).all()
-    assert result.size == 70080
+    assert result.size == 70082
     assert result.index.dtype == "