diff --git a/.github/workflows/nightly_build.yml b/.github/workflows/nightly_build.yml new file mode 100644 index 00000000..733df177 --- /dev/null +++ b/.github/workflows/nightly_build.yml @@ -0,0 +1,60 @@ +name: nightly_build + +on: + # push: + schedule: + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule + # 5:24 am UTC (11:24pm MDT the day before) every weekday night in MDT + - cron: '24 5 * * 2-6' + +env: + # Favor_Local_Gems enforces develop branch of all Ruby dependencies + # This is our canary in the coal mine! If any simulation tests fail, comment this and retry. + # If CI is then successful, we have a breaking change in a dependency somewhere. + FAVOR_LOCAL_GEMS: true + GEM_DEVELOPER_KEY: ${{ secrets.GEM_DEVELOPER_KEY }} + UO_NUM_PARALLEL: 2 + # GHA machines only have 2 cores. Trying to run more than that is even slower. + # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources + +jobs: + weeknight-tests: + # ubuntu-latest works since https://github.com/rbenv/ruby-build/releases/tag/v20220710 (July 10, 2022) + # https://github.com/rbenv/ruby-build/discussions/1940 + runs-on: ubuntu-latest + container: + image: docker://nrel/openstudio:3.5.1 + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + # Disco needs python 3.10 + python-version: '3.10' + - name: Install Ruby dependencies + run: | + ruby --version + bundle update + bundle exec certified-update + - name: Install python dependencies + run: bundle exec uo install_python + - name: Run Rspec + continue-on-error: true + # Continue to upload step even if a test fails, so we can troubleshoot + run: bundle exec rspec + - name: Upload artifacts + # Save results for examination - useful for debugging + uses: actions/upload-artifact@v3 + if: failure() # Only upload if rspec fails + with: + name: rspec_results + path: | + spec/test_directory**/run/ + # coverage/ + retention-days: 7 # save for 1 week before deleting + # coveralls action docs: https://github.com/marketplace/actions/coveralls-github-action + # - name: Coveralls + # uses: coverallsapp/github-action@1.1.3 + # with: + # github-token: ${{ secrets.GITHUB_TOKEN }} + # path-to-lcov: "./coverage/lcov/urbanopt-cli.lcov" diff --git a/CHANGELOG.md b/CHANGELOG.md index 96e1205a..00aab945 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## Version 0.9.2 +Date Range: 01/06/23 - 04/11/23: + +- Fixed [#417]( https://github.com/urbanopt/urbanopt-cli/pull/417 ), pin addressable dependency to resolve unicode_normalize error +- Fixed [#397]( https://github.com/urbanopt/urbanopt-cli/pull/397 ), New tests for GEB mappers + ## Version 0.9.1 Date Range: 12/14/22 - 01/05/23: diff --git a/CMakeLists.txt b/CMakeLists.txt index 91c87f8b..ec752cd3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 3.10.2) cmake_policy(SET CMP0048 NEW) -project(URBANoptCLI VERSION 0.8.3) +project(URBANoptCLI VERSION 0.9.1) include(FindOpenStudioSDK.cmake) @@ -89,16 +89,16 @@ option(BUILD_PACKAGE "Build package" OFF) # need to update the MD5sum for each platform and url below if(UNIX) if(APPLE) - set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20221011-darwin.tar.gz") - set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 "9eee6fb77b168d1b2211e1888f733c63") + set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20230111-darwin.tar.gz") + set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 
"dfb4d2d28d6ff25b42d8e375b4435be2") else() - set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20221011-linux.tar.gz") - set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 "5d10dc28cca00bbaed4df8ba0a930868") + set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20230111-linux.tar.gz") + set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 "2edb06d97ea496a3b3929a780404bb05") endif() elseif(WIN32) if(CMAKE_CL_64) - set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20221011-windows.tar.gz") - set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 "9c9dff191bc91379ce60923bfe6cfa4a") + set(URBANOPT_CLI_GEMS_ZIP_FILENAME "urbanopt-cli-gems-20230111-windows.tar.gz") + set(URBANOPT_CLI_GEMS_ZIP_EXPECTED_MD5 "ecbad071c3aba2223e9ad5803c8004d8") endif() endif() @@ -211,7 +211,7 @@ elseif(UNIX) # set(CPACK_DEBIAN_PACKAGE_DEBUG ON) # These two will set the .deb install path correctly - set(CPACK_DEBIAN_PACKAGE_DEPENDS "libqdbm14,sqlite3,libgomp1") + set(CPACK_DEBIAN_PACKAGE_DEPENDS "libqdbm14,sqlite3,libgomp1,libncurses5") set(CPACK_SET_DESTDIR ON) set(CPACK_INSTALL_PREFIX /usr/local/urbanopt-cli-${URBANOPT_CLI_VERSION}) diff --git a/FindOpenStudioSDK.cmake b/FindOpenStudioSDK.cmake index 4fd44313..802306d9 100644 --- a/FindOpenStudioSDK.cmake +++ b/FindOpenStudioSDK.cmake @@ -1,6 +1,6 @@ set(OPENSTUDIO_VERSION_MAJOR 3) -set(OPENSTUDIO_VERSION_MINOR 4) -set(OPENSTUDIO_VERSION_PATCH 0) +set(OPENSTUDIO_VERSION_MINOR 5) +set(OPENSTUDIO_VERSION_PATCH 1) set(OPENSTUDIO_VERSION "${OPENSTUDIO_VERSION_MAJOR}.${OPENSTUDIO_VERSION_MINOR}.${OPENSTUDIO_VERSION_PATCH}") find_package(openstudio "${OPENSTUDIO_VERSION}" CONFIG) @@ -16,23 +16,23 @@ else() set(OPENSTUDIO_BASELINK "https://openstudio-builds.s3.amazonaws.com/${OPENSTUDIO_VERSION}" CACHE STRING "Base link to where the openstudio archives are hosted" FORCE) - set(OPENSTUDIO_VERSION_SHA "+4bd816f785") + set(OPENSTUDIO_VERSION_SHA "+22e1db7be5") if(APPLE) - set(OPENSTUDIO_EXPECTED_HASH 5a1e5fdfc61a879a9d72dcf625a83e65) - set(OPENSTUDIO_PLATFORM "Darwin") + set(OPENSTUDIO_EXPECTED_HASH f21b03a44aa9ac3e52a4bdfa20009171) + set(OPENSTUDIO_PLATFORM "Darwin-x86_64") set(OPENSTUDIO_EXT "tar.gz") elseif(UNIX) if(LSB_RELEASE_VERSION_SHORT MATCHES "20.04") - set(OPENSTUDIO_EXPECTED_HASH 1922de95bb3e196f1c719400ce58871c) + set(OPENSTUDIO_EXPECTED_HASH 6e5c93002f0cfb445dcdcdb1270261a4) + set(OPENSTUDIO_PLATFORM "Ubuntu-20.04") + else() # Assumes 20.04 + set(OPENSTUDIO_EXPECTED_HASH 6e5c93002f0cfb445dcdcdb1270261a4) set(OPENSTUDIO_PLATFORM "Ubuntu-20.04") - else() # Assumes 18.04 - set(OPENSTUDIO_EXPECTED_HASH 44a837fa96fe2ce1a883492a3a1cae09) - set(OPENSTUDIO_PLATFORM "Ubuntu-18.04") endif() set(OPENSTUDIO_EXT "tar.gz") elseif(WIN32) - set(OPENSTUDIO_EXPECTED_HASH 9adffb37a62721ec51a33ce97533e956) + set(OPENSTUDIO_EXPECTED_HASH bc83efcb140d20f8f9758559a58c4347) set(OPENSTUDIO_PLATFORM "Windows") set(OPENSTUDIO_EXT "tar.gz") endif() diff --git a/README.md b/README.md index 0e5bac4a..0625eeec 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,10 @@ +[![Coverage Status](https://coveralls.io/repos/github/urbanopt/urbanopt-cli/badge.svg?branch=develop)](https://coveralls.io/github/urbanopt/urbanopt-cli?branch=develop) + # URBANopt CLI This is the command line interface (CLI) for the URBANopt™ SDK. 
-## Installation (Using Ruby) +## Installation (Using Ruby) 2 ) Using ruby add this line to your application's Gemfile: @@ -25,40 +27,40 @@ gem install urbanopt-cli ## Installation (Using Installer) -The UrbanOpt installer is an alternate way to install the UrbanOpt CLI that also includes Ruby 2.5.x and OpenStudio SDK. -Below are installation instructions for each platform. +The UrbanOpt installer is an alternate way to install the UrbanOpt CLI that also includes Ruby 2.5.x and OpenStudio SDK. +Below are installation instructions for each platform. ### Linux (Ubuntu 18.04) -Download the [.deb package](https://docs.urbanopt.net/installation/linux.html#install-with-the-urbanopt-installer). +Download the [.deb package](https://docs.urbanopt.net/installation/linux.html#install-with-the-urbanopt-installer). ```terminal -sudo apt update +sudo apt update sudo apt install ./UrbanOptCLI-0.3.1.b6f118d506-Linux.deb ``` -This will install to `/usr/local/` directory. -e.g. -`/usr/local/urbanopt-cli-0.3.1/` +This will install to `/usr/local/` directory. +e.g. +`/usr/local/urbanopt-cli-0.3.1/` -To run the UrbanOpt CLI, first run the `setup-env.sh` script that generates environmental variables and stores these in `env_uo.sh` in your home directory. +To run the UrbanOpt CLI, first run the `setup-env.sh` script that generates environmental variables and stores these in `env_uo.sh` in your home directory. ```terminal -/usr/local/urbanopt-cli-0.3.1/setup-env.sh +/usr/local/urbanopt-cli-0.3.1/setup-env.sh . ~/.env_uo.sh ``` When launching new shell terminals run `. ~/.env_uo.sh` to setup the environment.  -### Mac OSX (>= 10.12) +### Mac OSX (>= 10.12) -Download the [.dmg package](https://docs.urbanopt.net/installation/mac.html#install-with-the-urbanopt-installer). +Download the [.dmg package](https://docs.urbanopt.net/installation/mac.html#install-with-the-urbanopt-installer). -Use the GUI installer and choose a directory to install. Once installed, open a terminal and run the provided setup script. -The `setup-env.sh` generates env variables and stores them in a file `.env_uo.sh` in your home directory. +Use the GUI installer and choose a directory to install. Once installed, open a terminal and run the provided setup script. +The `setup-env.sh` generates env variables and stores them in a file `.env_uo.sh` in your home directory. -```terminal -/Applications/UrbanOptCLI_0.3.1/setup-env.sh +```terminal +/Applications/UrbanOptCLI_0.3.1/setup-env.sh . ~/.env_uo.sh ``` @@ -66,26 +68,26 @@ When launching new shell terminals run `. ~/.env_uo.s` to setup the environment. ### Windows (64-bit Windows 7 – 10) -Download the [.exe installer](https://docs.urbanopt.net/installation/windows.html#install-with-the-urbanopt-installer). +Download the [.exe installer](https://docs.urbanopt.net/installation/windows.html#install-with-the-urbanopt-installer). Use the GUI installer and choose a directory to install. Once installed, open a terminal (Powershell, Windows CMD and GitBash are supported) and run the provided setup script for that shell (below are the setup scripts for each respective shell environment). #### Bash (or GitBash for Windows) ```terminal -c:/urbanopt-cli-0.3.1/setup-env.sh -. ~/.env_uo.sh +c:/urbanopt-cli-0.3.1/setup-env.sh +. ~/.env_uo.sh ``` #### Powershell ```terminal -c:\urbanopt-cli-0.3.1\setup-env.ps1 -. ~\.env_uo.ps1 +c:\urbanopt-cli-0.3.1\setup-env.ps1 +. 
~\.env_uo.ps1 ``` #### Windows Command Prompt ```terminal -c:\urbanopt-cli-0.3.1\setup-env.bat -%HOMEPATH%\.env_uo.bat +c:\urbanopt-cli-0.3.1\setup-env.bat +%HOMEPATH%\.env_uo.bat ``` When launching new shell terminals run the correct environment config to setup the environment.  diff --git a/example_files/Gemfile b/example_files/Gemfile index bfdf0c6e..c14bee61 100644 --- a/example_files/Gemfile +++ b/example_files/Gemfile @@ -25,6 +25,8 @@ allow_local = ENV['FAVOR_LOCAL_GEMS'] # gem 'urbanopt-core', github: 'URBANopt/urbanopt-core-gem', branch: 'develop' # end +# pin this dependency to avoid unicode_normalize error +gem 'addressable', '2.8.1' if allow_local && File.exist?('../openstudio-common-measures-gem') gem 'openstudio-common-measures', path: '../../openstudio-common-measures-gem' diff --git a/example_files/mappers/Baseline.rb b/example_files/mappers/Baseline.rb index 62e9a38b..a5154612 100644 --- a/example_files/mappers/Baseline.rb +++ b/example_files/mappers/Baseline.rb @@ -391,55 +391,55 @@ def get_future_emissions_region(feature) # Options are: AZNMc, CAMXc, ERCTc, FRCCc, MROEc, MROWc, NEWEc, NWPPc, NYSTc, RFCEc, RFCMc, RFCWc, RMPAc, SPNOc, SPSOc, SRMVc, SRMWc, SRSOc, SRTVc, and SRVCc # egrid subregions can map directly to zipcodes but not to states. Some state might include multiple egrid subregions. the default mapper prioritize the egrid subregion that is most common in the state (covers the biggest number of zipcodes) future_emissions_mapping_hash = - { 'FL': 'FRCCc', # ['FRCCc', 'SRSOc'] - 'MS': 'SRMVc', # ['SRMVc', 'SRTVc'] - 'NE': 'MROWc', # ['MROWc', 'RMPAc'] - 'OR': 'NWPPc', - 'CA': 'CAMXc', # ['CAMXc', 'NWPPc'] - 'VA': 'SRVCc', # ['SRVCc', 'RFCWc', 'RFCEc'], - 'AR': 'SRMVc', # ['SRMVc', 'SPSOc'] - 'TX': 'ERCTc', # ['ERCTc', 'SRMVc', 'SPSOc', 'AZNMc'] - 'OH': 'RFCWc', - 'UT': 'NWPPc', - 'MT': 'NWPPc', # ['NWPPc', 'MROWc'] - 'TN': 'SRTVc', - 'ID': 'NWPPc', - 'WI': 'MROEc', # ['RFCWc', 'MROEc', 'MROWc'] - 'WV': 'RFCWc', - 'NC': 'SRVCc', - 'LA': 'SRMVc', - 'IL': 'SRMWc', # ['RFCWc', 'SRMWc'] - 'OK': 'SPSOc', - 'IA': 'MROWc', - 'WA': 'NWPPc', - 'SD': 'MROWc', # ['MROWc', 'RMPAc'] - 'MN': 'MROWc', - 'KY': 'SRTVc', # ['SRTVc', 'RFCWc'] - 'MI': 'RFCMc', # ['RFCMc', 'MROEc'] - 'KS': 'SPNOc', - 'NJ': 'RFCEc', - 'NY': 'NYSTc', - 'IN': 'RFCWc', - 'VT': 'NEWEc', - 'NM': 'AZNMc', # ['AZNMc', 'SPSOc'] - 'WY': 'RMPAc', # ['RMPAc', 'NWPPc'] - 'GA': 'SRSOc', - 'MO': 'SRMWc', # ['SRMWc', 'SPNOc'] - 'DC': 'RFCEc', - 'SC': 'SRVCc', - 'PA': 'RFCEc', # ['RFCEc', 'RFCWc'] - 'CO': 'RMPAc', - 'AZ': 'AZNMc', - 'ME': 'NEWEc', - 'AL': 'SRSOc', - 'MD': 'RFCEc', # ['RFCEc', 'RFCWc'] - 'NH': 'NEWEc', - 'MA': 'NEWEc', - 'ND': 'MROWc', - 'NV': 'NWPPc', # ['NWPPc', 'AZNMc'] - 'CT': 'NEWEc', - 'DE': 'RFCEc', - 'RI': 'NEWEc' } + { FL: 'FRCCc', # ['FRCCc', 'SRSOc'] + MS: 'SRMVc', # ['SRMVc', 'SRTVc'] + NE: 'MROWc', # ['MROWc', 'RMPAc'] + OR: 'NWPPc', + CA: 'CAMXc', # ['CAMXc', 'NWPPc'] + VA: 'SRVCc', # ['SRVCc', 'RFCWc', 'RFCEc'], + AR: 'SRMVc', # ['SRMVc', 'SPSOc'] + TX: 'ERCTc', # ['ERCTc', 'SRMVc', 'SPSOc', 'AZNMc'] + OH: 'RFCWc', + UT: 'NWPPc', + MT: 'NWPPc', # ['NWPPc', 'MROWc'] + TN: 'SRTVc', + ID: 'NWPPc', + WI: 'MROEc', # ['RFCWc', 'MROEc', 'MROWc'] + WV: 'RFCWc', + NC: 'SRVCc', + LA: 'SRMVc', + IL: 'SRMWc', # ['RFCWc', 'SRMWc'] + OK: 'SPSOc', + IA: 'MROWc', + WA: 'NWPPc', + SD: 'MROWc', # ['MROWc', 'RMPAc'] + MN: 'MROWc', + KY: 'SRTVc', # ['SRTVc', 'RFCWc'] + MI: 'RFCMc', # ['RFCMc', 'MROEc'] + KS: 'SPNOc', + NJ: 'RFCEc', + NY: 'NYSTc', + IN: 'RFCWc', + VT: 'NEWEc', + NM: 'AZNMc', # ['AZNMc', 
'SPSOc'] + WY: 'RMPAc', # ['RMPAc', 'NWPPc'] + GA: 'SRSOc', + MO: 'SRMWc', # ['SRMWc', 'SPNOc'] + DC: 'RFCEc', + SC: 'SRVCc', + PA: 'RFCEc', # ['RFCEc', 'RFCWc'] + CO: 'RMPAc', + AZ: 'AZNMc', + ME: 'NEWEc', + AL: 'SRSOc', + MD: 'RFCEc', # ['RFCEc', 'RFCWc'] + NH: 'NEWEc', + MA: 'NEWEc', + ND: 'MROWc', + NV: 'NWPPc', # ['NWPPc', 'AZNMc'] + CT: 'NEWEc', + DE: 'RFCEc', + RI: 'NEWEc' } # get the state from weather file state = feature.weather_filename.split('_', -1)[1] @@ -459,55 +459,55 @@ def get_hourly_historical_emissions_region(feature) # Mapping is done using mapping tools from eGrid and AVERT (ZipCode for eGrid and fraction of state for AVERT). # Mapped based on the maps of each set of regions: hourly_historical_mapping_hash = - { 'FL': 'Florida', - 'MS': 'Midwest', - 'NE': 'Midwest', # MRWO could be Midwest / Central - 'OR': 'Northwest', - 'CA': 'California', - 'VA': 'Carolinas', - 'AR': 'Midwest', - 'TX': 'Texas', - 'OH': 'Midwest', # RFCW could be Midwest / Mid Atlantic - 'UT': 'Northwest', - 'MT': 'Northwest', - 'TN': 'Tennessee', - 'ID': 'Northwest', - 'WI': 'Midwest', - 'WV': 'Midwest', # RFCW could be Midwest / Mid Atlantic - 'NC': 'Carolinas', - 'LA': 'Midwest', - 'IL': 'Midwest', - 'OK': 'Central', - 'IA': 'Midwest', # MRWO could be Midwest / Central - 'WA': 'Northwest', - 'SD': 'Midwest', # MRWO could be Midwest / Central - 'MN': 'Midwest', # MRWO could be Midwest / Central - 'KY': 'Tennessee', - 'MI': 'Midwest', - 'KS': 'Central', - 'NJ': 'Mid-Atlantic', - 'NY': 'New York', - 'IN': 'Midwest', # RFCW could be Midwest / Mid Atlantic - 'VT': 'New England', - 'NM': 'Southwest', - 'WY': 'Rocky Mountains', - 'GA': 'SRSO', - 'MO': 'Midwest', - 'DC': 'Mid-Atlantic', - 'SC': 'Carolinas', - 'PA': 'Mid-Atlantic', - 'CO': 'Rocky Mountains', - 'AZ': 'Southwest', - 'ME': 'New England', - 'AL': 'Southeast', - 'MD': 'Mid-Atlantic', - 'NH': 'New England', - 'MA': 'New England', - 'ND': 'Midwest', # MRWO could be Midwest / Central - 'NV': 'Northwest', - 'CT': 'New England', - 'DE': 'Mid-Atlantic', - 'RI': 'New England' } + { FL: 'Florida', + MS: 'Midwest', + NE: 'Midwest', # MRWO could be Midwest / Central + OR: 'Northwest', + CA: 'California', + VA: 'Carolinas', + AR: 'Midwest', + TX: 'Texas', + OH: 'Midwest', # RFCW could be Midwest / Mid Atlantic + UT: 'Northwest', + MT: 'Northwest', + TN: 'Tennessee', + ID: 'Northwest', + WI: 'Midwest', + WV: 'Midwest', # RFCW could be Midwest / Mid Atlantic + NC: 'Carolinas', + LA: 'Midwest', + IL: 'Midwest', + OK: 'Central', + IA: 'Midwest', # MRWO could be Midwest / Central + WA: 'Northwest', + SD: 'Midwest', # MRWO could be Midwest / Central + MN: 'Midwest', # MRWO could be Midwest / Central + KY: 'Tennessee', + MI: 'Midwest', + KS: 'Central', + NJ: 'Mid-Atlantic', + NY: 'New York', + IN: 'Midwest', # RFCW could be Midwest / Mid Atlantic + VT: 'New England', + NM: 'Southwest', + WY: 'Rocky Mountains', + GA: 'SRSO', + MO: 'Midwest', + DC: 'Mid-Atlantic', + SC: 'Carolinas', + PA: 'Mid-Atlantic', + CO: 'Rocky Mountains', + AZ: 'Southwest', + ME: 'New England', + AL: 'Southeast', + MD: 'Mid-Atlantic', + NH: 'New England', + MA: 'New England', + ND: 'Midwest', # MRWO could be Midwest / Central + NV: 'Northwest', + CT: 'New England', + DE: 'Mid-Atlantic', + RI: 'New England' } # get the state from weather file state = feature.weather_filename.split('_', -1)[1] @@ -524,55 +524,55 @@ def get_annual_historical_emissions_region(feature) # Options are: AKGD, AKMS, AZNM, CAMX, ERCT, FRCC, HIMS, HIOA, MROE, MROW, NEWE, NWPP, NYCW, NYLI, NYUP, RFCE, RFCM, RFCW, 
RMPA, SPNO, SPSO, SRMV, SRMW, SRSO, SRTV, and SRVC # egrid subregions can map directly to zipcodes but not to states. Some state might include multiple egrid subregions. the default mapper prioritize the egrid subregion that is most common in the state (covers the biggest number of zipcodes) annual_historical_mapping_hash = - { 'FL': 'FRCC', - 'MS': 'SRMV', - 'NE': 'MROW', - 'OR': 'NWPP', - 'CA': 'CAMX', - 'VA': 'SRVC', - 'AR': 'SRMV', - 'TX': 'ERCT', - 'OH': 'RFCW', - 'UT': 'NWPP', - 'MT': 'NWPP', - 'TN': 'SRTV', - 'ID': 'NWPP', - 'WI': 'MROE', - 'WV': 'RFCW', - 'NC': 'SRVC', - 'LA': 'SRMV', - 'IL': 'SRMW', - 'OK': 'SPSO', - 'IA': 'MROW', - 'WA': 'NWPP', - 'SD': 'MROW', - 'MN': 'MROW', - 'KY': 'SRTV', - 'MI': 'RFCM', - 'KS': 'SPNO', - 'NJ': 'RFCE', - 'NY': 'NYCW', - 'IN': 'RFCW', - 'VT': 'NEWE', - 'NM': 'AZNM', - 'WY': 'RMPA', - 'GA': 'SRSO', - 'MO': 'SRMW', - 'DC': 'RFCE', - 'SC': 'SRVC', - 'PA': 'RFCE', - 'CO': 'RMPA', - 'AZ': 'AZNM', - 'ME': 'NEWE', - 'AL': 'SRSO', - 'MD': 'RFCE', - 'NH': 'NEWE', - 'MA': 'NEWE', - 'ND': 'MROW', - 'NV': 'NWPP', - 'CT': 'NEWE', - 'DE': 'RFCE', - 'RI': 'NEWE' } + { FL: 'FRCC', + MS: 'SRMV', + NE: 'MROW', + OR: 'NWPP', + CA: 'CAMX', + VA: 'SRVC', + AR: 'SRMV', + TX: 'ERCT', + OH: 'RFCW', + UT: 'NWPP', + MT: 'NWPP', + TN: 'SRTV', + ID: 'NWPP', + WI: 'MROE', + WV: 'RFCW', + NC: 'SRVC', + LA: 'SRMV', + IL: 'SRMW', + OK: 'SPSO', + IA: 'MROW', + WA: 'NWPP', + SD: 'MROW', + MN: 'MROW', + KY: 'SRTV', + MI: 'RFCM', + KS: 'SPNO', + NJ: 'RFCE', + NY: 'NYCW', + IN: 'RFCW', + VT: 'NEWE', + NM: 'AZNM', + WY: 'RMPA', + GA: 'SRSO', + MO: 'SRMW', + DC: 'RFCE', + SC: 'SRVC', + PA: 'RFCE', + CO: 'RMPA', + AZ: 'AZNM', + ME: 'NEWE', + AL: 'SRSO', + MD: 'RFCE', + NH: 'NEWE', + MA: 'NEWE', + ND: 'MROW', + NV: 'NWPP', + CT: 'NEWE', + DE: 'RFCE', + RI: 'NEWE' } # get the state from weather file state = feature.weather_filename.split('_', -1)[1] @@ -880,7 +880,7 @@ def create_osw(scenario, features, feature_names) if !template.nil? && template.include?('Residential IECC') captures = template.match(/Residential IECC (?\d+) - Customizable Template (?\w+) (?\d+)/) - template_vals = Hash[captures.names.zip(captures.captures)] + template_vals = captures.names.zip(captures.captures).to_h template_vals = template_vals.transform_keys(&:to_sym) epw = File.join(File.dirname(__FILE__), '../weather', feature.weather_filename) @@ -994,18 +994,18 @@ def create_osw(scenario, features, feature_names) default_args.each do |arg_name, arg_default| next if arg_default.nil? - if !args.key?(arg_name) - args[arg_name] = arg_default - else + if args.key?(arg_name) if debug - if !arg_default.nil? + if arg_default.nil? + puts "Setting #{arg_name} to '#{args[arg_name]}'." + else if args[arg_name] != arg_default puts "Overriding #{arg_name} default '#{arg_default}' with '#{args[arg_name]}'." end - else - puts "Setting #{arg_name} to '#{args[arg_name]}'." 
end end + else + args[arg_name] = arg_default end end diff --git a/example_files/mappers/ClassProject.rb b/example_files/mappers/ClassProject.rb index 0adc3d9d..5fe180d1 100644 --- a/example_files/mappers/ClassProject.rb +++ b/example_files/mappers/ClassProject.rb @@ -46,7 +46,7 @@ def create_osw(scenario, features, feature_names) feature = features[0] building_type = feature.building_type - + # Energy Efficiency Measures OpenStudio::Extension.set_measure_argument(osw, 'AddOverhangsByProjectionFactor', '__SKIP__', true) @@ -96,8 +96,8 @@ def create_osw(scenario, features, feature_names) OpenStudio::Extension.set_measure_argument(osw, 'add_hpwh', 'flex1', 'Float') OpenStudio::Extension.set_measure_argument(osw, 'add_hpwh', 'flex_hrs1', '08:01-20:00') - OpenStudio::Extension.set_measure_argument(osw,'add_packaged_ice_storage', '__SKIP__', true) - OpenStudio::Extension.set_measure_argument(osw,'add_packaged_ice_storage', 'ice_cap', 'AutoSize') + OpenStudio::Extension.set_measure_argument(osw, 'add_packaged_ice_storage', '__SKIP__', true) + OpenStudio::Extension.set_measure_argument(osw, 'add_packaged_ice_storage', 'ice_cap', 'AutoSize') OpenStudio::Extension.set_measure_argument(osw, 'ShiftScheduleByType', '__SKIP__', true) OpenStudio::Extension.set_measure_argument(osw, 'ShiftScheduleByType', 'shift_value', -2) diff --git a/example_files/measures/BuildResidentialModel/measure.rb b/example_files/measures/BuildResidentialModel/measure.rb index 09338ca5..53dd5cbf 100644 --- a/example_files/measures/BuildResidentialModel/measure.rb +++ b/example_files/measures/BuildResidentialModel/measure.rb @@ -162,7 +162,9 @@ def run(model, runner, user_arguments) measures = {} hpxml_path = File.expand_path("../#{unit['name']}.xml") - if !unit.key?('hpxml_path') + if unit.key?('hpxml_path') + FileUtils.cp(File.expand_path(unit['hpxml_path']), hpxml_path) + else # BuildResidentialHPXML measure_subdir = 'BuildResidentialHPXML' @@ -196,8 +198,6 @@ def run(model, runner, user_arguments) measure_args.delete('geometry_num_floors_above_grade') measures[measure_subdir] << measure_args - else - FileUtils.cp(File.expand_path(unit['hpxml_path']), hpxml_path) end # BuildResidentialScheduleFile diff --git a/example_files/measures/BuildResidentialModel/resources/util.rb b/example_files/measures/BuildResidentialModel/resources/util.rb index f991973e..01e40dc2 100644 --- a/example_files/measures/BuildResidentialModel/resources/util.rb +++ b/example_files/measures/BuildResidentialModel/resources/util.rb @@ -80,17 +80,17 @@ def self.valid_float?(str) end def self.interp2(x, x0, x1, f0, f1) - ''' + ' Returns the linear interpolation between two results. - ''' + ' return f0 + ((x - x0) / (x1 - x0)) * (f1 - f0) end def self.interp4(x, y, x1, x2, y1, y2, fx1y1, fx1y2, fx2y1, fx2y2) - ''' + ' Returns the bilinear interpolation between four results. 
- ''' + ' return (fx1y1 / ((x2 - x1) * (y2 - y1))) * (x2 - x) * (y2 - y) \ + (fx2y1 / ((x2 - x1) * (y2 - y1))) * (x - x1) * (y2 - y) \ @@ -99,7 +99,7 @@ def self.interp4(x, y, x1, x2, y1, y2, fx1y1, fx1y2, fx2y1, fx2y2) end def self.biquadratic(x, y, c) - ''' + ' Description: ------------ Calculate the result of a biquadratic polynomial with independent variables @@ -113,7 +113,7 @@ def self.biquadratic(x, y, c) Outputs: -------- z float result of biquadratic polynomial - ''' + ' if c.length != 6 puts 'Error: There must be 6 coefficients in a biquadratic polynomial' end @@ -122,7 +122,7 @@ def self.biquadratic(x, y, c) end def self.quadratic(x, c) - ''' + ' Description: ------------ Calculate the result of a quadratic polynomial with independent variable @@ -138,7 +138,7 @@ def self.quadratic(x, c) Outputs: -------- y float result of biquadratic polynomial - ''' + ' if c.size != 3 puts 'Error: There must be 3 coefficients in a quadratic polynomial' end @@ -148,7 +148,7 @@ def self.quadratic(x, c) end def self.bicubic(x, y, c) - ''' + ' Description: ------------ Calculate the result of a bicubic polynomial with independent variables @@ -166,7 +166,7 @@ def self.bicubic(x, y, c) Outputs: -------- z float result of bicubic polynomial - ''' + ' if c.size != 10 puts 'Error: There must be 10 coefficients in a bicubic polynomial' end @@ -177,7 +177,7 @@ def self.bicubic(x, y, c) end def self.Iterate(x0, f0, x1, f1, x2, f2, icount, cvg) - ''' + ' Description: ------------ Determine if a guess is within tolerance for convergence @@ -225,7 +225,7 @@ def self.Iterate(x0, f0, x1, f1, x2, f2, icount, cvg) else: print "x did NOT converge after", i, "iterations" print "x, when f(x) is", f,"is", x - ''' + ' tolRel = 1e-5 dx = 0.1 @@ -987,16 +987,16 @@ def electricity_heating(custom_meter_infos, unit, thermal_zones) plant_loop.supplyComponents.each do |supply_component| next unless supply_component.to_BoilerHotWater.is_initialized - if units_served.length != 1 # this is a central system - if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' - custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Electric Energy'] - end - custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] - else + if units_served.length == 1 if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' custom_meter_infos["#{unit.name}:ElectricityHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Electric Energy'] end custom_meter_infos["#{unit.name}:ElectricityHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] + else # this is a central system + if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' + custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Electric Energy'] + end + custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] end end end @@ -1029,16 +1029,16 @@ def electricity_heating(custom_meter_infos, unit, thermal_zones) plant_loop.supplyComponents.each do |supply_component| next unless supply_component.to_BoilerHotWater.is_initialized - if units_served.length != 1 # this is a central system - if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' - custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler 
Electric Energy'] - end - custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] - else + if units_served.length == 1 if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' custom_meter_infos["#{unit.name}:ElectricityHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Electric Energy'] end custom_meter_infos["#{unit.name}:ElectricityHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] + else # this is a central system + if supply_component.to_BoilerHotWater.get.fuelType == 'Electricity' + custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Electric Energy'] + end + custom_meter_infos['Central:ElectricityHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Ancillary Electric Energy'] end end end @@ -1089,10 +1089,10 @@ def electricity_cooling(custom_meter_infos, unit, thermal_zones) plant_loop.supplyComponents.each do |supply_component| next unless supply_component.to_ChillerElectricEIR.is_initialized - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:ElectricityCooling']['key_var_groups'] << [supply_component.name.to_s, 'Chiller Electric Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:ElectricityCooling"]['key_var_groups'] << [supply_component.name.to_s, 'Chiller Electric Energy'] + else # this is a central system + custom_meter_infos['Central:ElectricityCooling']['key_var_groups'] << [supply_component.name.to_s, 'Chiller Electric Energy'] end end end @@ -1317,10 +1317,10 @@ def natural_gas_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'NaturalGas' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:NaturalGasHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:NaturalGasHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] + else # this is a central system + custom_meter_infos['Central:NaturalGasHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] end end end @@ -1350,10 +1350,10 @@ def natural_gas_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'NaturalGas' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:NaturalGasHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:NaturalGasHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] + else # this is a central system + custom_meter_infos['Central:NaturalGasHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Gas Energy'] end end end @@ -1446,10 +1446,10 @@ def fuel_oil_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'FuelOil#1' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:FuelOilHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] - else + if units_served.length 
== 1 custom_meter_infos["#{unit.name}:FuelOilHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] + else # this is a central system + custom_meter_infos['Central:FuelOilHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] end end end @@ -1479,10 +1479,10 @@ def fuel_oil_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'FuelOil#1' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:FuelOilHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:FuelOilHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] + else # this is a central system + custom_meter_infos['Central:FuelOilHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler FuelOil#1 Energy'] end end end @@ -1549,10 +1549,10 @@ def propane_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'PropaneGas' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:PropaneHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:PropaneHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] + else # this is a central system + custom_meter_infos['Central:PropaneHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] end end end @@ -1582,10 +1582,10 @@ def propane_heating(custom_meter_infos, unit, thermal_zones) next unless supply_component.to_BoilerHotWater.is_initialized next if supply_component.to_BoilerHotWater.get.fuelType != 'PropaneGas' - if units_served.length != 1 # this is a central system - custom_meter_infos['Central:PropaneHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] - else + if units_served.length == 1 custom_meter_infos["#{unit.name}:PropaneHeating"]['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] + else # this is a central system + custom_meter_infos['Central:PropaneHeating']['key_var_groups'] << [supply_component.name.to_s, 'Boiler Propane Energy'] end end end diff --git a/lib/uo_cli.rb b/lib/uo_cli.rb old mode 100644 new mode 100755 index 7f6fa758..8535f8e6 --- a/lib/uo_cli.rb +++ b/lib/uo_cli.rb @@ -453,26 +453,26 @@ def opt_des_run # One solution would be changing scenario_file to feature. # Would that be confusing when creating a ScenarioFile from the FeatureFile? 
if @opthash.subopts[:feature] - @feature_path, @feature_name = File.split(File.expand_path(@opthash.subopts[:feature])) + @feature_path, @feature_name = Pathname(File.expand_path(@opthash.subopts[:feature])).split end if @opthash.subopts[:scenario] - @root_dir, @scenario_file_name = File.split(File.expand_path(@opthash.subopts[:scenario])) + @root_dir, @scenario_file_name = Pathname(File.expand_path(@opthash.subopts[:scenario])).split @scenario_name = File.basename(@scenario_file_name, File.extname(@scenario_file_name)) end # Simulate energy usage as defined by ScenarioCSV def self.run_func - run_dir = File.join(@root_dir, 'run', @scenario_name.downcase) - csv_file = File.join(@root_dir, @scenario_file_name) - featurefile = File.join(@root_dir, @feature_name) - mapper_files_dir = File.join(@root_dir, 'mappers') - reopt_files_dir = File.join(@root_dir, 'reopt/') + run_dir = @root_dir / 'run' / @scenario_name.downcase + csv_file = @root_dir / @scenario_file_name + featurefile = @root_dir / @feature_name + mapper_files_dir = @root_dir / 'mappers' + reopt_files_dir = @root_dir / 'reopt/' num_header_rows = 1 if @feature_id - feature_run_dir = File.join(run_dir, @feature_id) + feature_run_dir = run_dir / @feature_id # If run folder for feature exists, remove it - FileUtils.rm_rf(feature_run_dir) if File.exist?(feature_run_dir) + FileUtils.rm_rf(feature_run_dir) if feature_run_dir.exist? end feature_file = URBANopt::GeoJSON::GeoFile.from_file(featurefile) @@ -578,6 +578,26 @@ def self.create_reopt_scenario_file(existing_scenario_file) end end + # Change num_parallel in runner.conf to set number of cores to use when running simulations + # This function is called during project_dir creation/updating so users aren't surprised if they look at the config file + def self.use_num_parallel(project_dir) + if ENV['UO_NUM_PARALLEL'] || @opthash.subopts[:num_parallel] + runner_file_path = Pathname(project_dir) / 'runner.conf' + runner_conf_hash = JSON.parse(File.read(runner_file_path)) + if @opthash.subopts[:num_parallel] + runner_conf_hash['num_parallel'] = @opthash.subopts[:num_parallel] + File.open(runner_file_path, 'w+') do |f| + f << runner_conf_hash.to_json + end + elsif ENV['UO_NUM_PARALLEL'] + runner_conf_hash['num_parallel'] = ENV['UO_NUM_PARALLEL'].to_i + File.open(runner_file_path, 'w+') do |f| + f << runner_conf_hash.to_json + end + end + end + end + # Create project folder # params\ # +dir_name+:: _string_ Name of new project folder @@ -617,15 +637,7 @@ def self.create_project_folder(dir_name, empty_folder: false, overwrite_project: # copy config file FileUtils.cp(File.join(path_item, 'runner.conf'), dir_name) - # If the env var is set, change the num_parallel value to be what the env var is set to - if ENV['UO_NUM_PARALLEL'] - runner_file_path = File.join(dir_name, 'runner.conf') - runner_conf_hash = JSON.parse(File.read(runner_file_path)) - runner_conf_hash['num_parallel'] = ENV['UO_NUM_PARALLEL'].to_i - File.open(runner_file_path, 'w+') do |f| - f << runner_conf_hash.to_json - end - end + use_num_parallel(dir_name) # copy gemfile FileUtils.cp(File.join(path_item, 'Gemfile'), dir_name) @@ -681,6 +693,9 @@ def self.create_project_folder(dir_name, empty_folder: false, overwrite_project: FileUtils.cp(File.join(path_item, 'mappers/ThermalStorage.rb'), File.join(dir_name, 'mappers')) FileUtils.cp(File.join(path_item, 'mappers/EvCharging.rb'), File.join(dir_name, 'mappers')) FileUtils.cp(File.join(path_item, 'mappers/FlexibleHotWater.rb'), File.join(dir_name, 'mappers')) + 
FileUtils.cp(File.join(path_item, 'mappers/ChilledWaterStorage.rb'), File.join(dir_name, 'mappers')) + FileUtils.cp(File.join(path_item, 'mappers/PeakHoursThermostatAdjust.rb'), File.join(dir_name, 'mappers')) + FileUtils.cp(File.join(path_item, 'mappers/PeakHoursMelsShedding.rb'), File.join(dir_name, 'mappers')) # copy osw file FileUtils.cp(File.join(path_item, 'mappers/base_workflow.osw'), File.join(dir_name, 'mappers')) @@ -811,19 +826,10 @@ def self.update_project(existing_project_folder, new_project_directory) # copy config file FileUtils.cp_r(File.join(path_item, 'runner.conf'), new_path, remove_destination: true) - # If the env var is set, change the num_parallel value to be what the env var is set to - # TODO: make this into a function...it's used in 2 places - if ENV['UO_NUM_PARALLEL'] - runner_file_path = File.join(new_path, 'runner.conf') - runner_conf_hash = JSON.parse(File.read(runner_file_path)) - runner_conf_hash['num_parallel'] = ENV['UO_NUM_PARALLEL'].to_i - File.open(runner_file_path, 'w+') do |f| - f << runner_conf_hash.to_json - end - end + use_num_parallel(new_path) # Replace standard mappers - # Note: this also copies createBar and Floorspace without checking project type (for now) + # FIXME: this also copies createBar and Floorspace without checking project type (for now) mappers = File.join(path_item, 'mappers') Pathname.new(mappers).children.each { |mapper| FileUtils.cp_r(mapper, File.join(new_path, 'mappers'), remove_destination: true) } @@ -920,7 +926,7 @@ def self.setup_python_variables # Return UO python packages list from python_deps/dependencies.json def self.get_python_deps deps = [] - the_path = "" + the_path = '' $LOAD_PATH.each do |path_item| if path_item.to_s.end_with?('example_files') # install python in cli gem's example_files/python_deps folder @@ -980,18 +986,18 @@ def self.check_python(python_only: false) puts "DEPENDENCIES RETRIEVED FROM FILE: #{deps}" errors = [] deps.each do |dep| - #TODO: Update when there is a stable release for DISCO - if dep[:name].to_s.include? "disco" + # TODO: Update when there is a stable release for DISCO + if dep[:name].to_s.include? 'disco' stdout, stderr, status = Open3.capture3("#{pvars[:pip_path]} show NREL-disco") - else + else stdout, stderr, status = Open3.capture3("#{pvars[:pip_path]} show #{dep[:name]}") end if stderr.empty? # check versions - m = stdout.match /^Version: (\S{3,}$)/ + m = stdout.match(/^Version: (\S{3,}$)/) err = true - if m and m.size > 1 - if !dep[:version].nil? and dep[:version].to_s == m[1].to_s + if m && m.size > 1 + if !dep[:version].nil? && dep[:version].to_s == m[1].to_s puts "...#{dep[:name]} found with specified version #{dep[:version]}" err = false elsif dep[:version].nil? @@ -1107,14 +1113,14 @@ def self.install_python_dependencies deps = get_python_deps deps.each do |dep| puts "Installing #{dep[:name]}..." - the_command = "" + the_command = '' if dep[:version].nil? the_command = "#{pvars[:pip_path]} install #{dep[:name]}" else the_command = "#{pvars[:pip_path]} install #{dep[:name]}~=#{dep[:version]}" end # system(the_command) - #puts "INSTALL COMMAND: #{the_command}" + # puts "INSTALL COMMAND: #{the_command}" stdout, stderr, status = Open3.capture3(the_command) if stderr && !stderr == '' puts "Error installing: #{stderr}" @@ -1197,7 +1203,7 @@ def self.install_python_dependencies @opthash.subopts[:scenario_file].nil? && @opthash.subopts[:reopt_scenario_file].nil? && @opthash.subopts[:project_folder].nil? - abort("\nNo options provided to the `create` command. 
Did you forget the `-p` flag? See `uo create --help` for all options\n") + abort("\nNo options provided for the `create` command. Did you forget a flag? Perhaps `-p`? See `uo create --help` for all options\n") end # Update existing URBANopt Project files @@ -1217,26 +1223,10 @@ def self.install_python_dependencies # Run simulations if @opthash.command == 'run' && @opthash.subopts[:scenario] && @opthash.subopts[:feature] - # Change num_parallel in runner.conf - Use case is for CI to use more cores - # If set by env variable, use that, otherwise use what the user specified in the cli - if ENV['UO_NUM_PARALLEL'] || @opthash.subopts[:num_parallel] - runner_file_path = File.join(@root_dir, 'runner.conf') - runner_conf_hash = JSON.parse(File.read(runner_file_path)) - if @opthash.subopts[:num_parallel] - runner_conf_hash['num_parallel'] = @opthash.subopts[:num_parallel] - File.open(runner_file_path, 'w+') do |f| - f << runner_conf_hash.to_json - end - elsif ENV['UO_NUM_PARALLEL'] - runner_conf_hash['num_parallel'] = ENV['UO_NUM_PARALLEL'].to_i - File.open(runner_file_path, 'w+') do |f| - f << runner_conf_hash.to_json - end - end - end + use_num_parallel(@root_dir) if @opthash.subopts[:scenario].to_s.include? '-' - @feature_id = (@feature_name.split(/\W+/)[1]).to_s + @feature_id = (@feature_name.to_s.split(/\W+/)[1]) end puts "\nSimulating features of '#{@feature_name}' as directed by '#{@scenario_file_name}'...\n\n" @@ -1414,12 +1404,12 @@ def self.install_python_dependencies commands.each do |command| # TODO: This will be updated so stderr only reports error/warnings at DISCO level stdout, stderr, status = Open3.capture3(command) - if !stderr.empty? + if !stderr.empty? puts "ERROR running DISCO: #{stderr}" end end - puts "Refer to detailed log file #{File.join(run_folder,'disco','run_upgrade_cost_analysis.log')} for more information on the run." - puts "Refer to the output summary file #{File.join(run_folder,'disco','output_summary.json')} for a summary of the results." + puts "Refer to detailed log file #{File.join(run_folder, 'disco', 'run_upgrade_cost_analysis.log')} for more information on the run." + puts "Refer to the output summary file #{File.join(run_folder, 'disco', 'output_summary.json')} for a summary of the results." end end @@ -1459,7 +1449,7 @@ def self.install_python_dependencies # Post-process the scenario if @opthash.command == 'process' - if @opthash.subopts[:default] == false && @opthash.subopts[:opendss] == false && @opthash.subopts[:reopt_scenario] == false && @opthash.subopts[:reopt_feature] == false && @opthash.subopts[:disco] == false + if @opthash.subopts[:default] == false && @opthash.subopts[:opendss] == false && @opthash.subopts[:reopt_scenario] == false && @opthash.subopts[:reopt_feature] == false && @opthash.subopts[:disco] == false abort("\nERROR: No valid process type entered. Must enter a valid process type\n") end @@ -1720,12 +1710,17 @@ def self.install_python_dependencies abort("\nERROR: Units type not recognized. Please use a valid option in the CLI") end building_type = feature[:properties][:building_type] # From FeatureFile - if feature_eui_value > validation_params['EUI'][@opthash.subopts[:units]][building_type]['max'] - puts "\nFeature #{File.basename(feature_path)} EUI of #{feature_eui_value.round(2)} #{unit_value} is greater than the validation maximum." 
- elsif feature_eui_value < validation_params['EUI'][@opthash.subopts[:units]][building_type]['min'] - puts "\nFeature #{File.basename(feature_path)} (#{building_type}) EUI of #{feature_eui_value.round(2)} #{unit_value} is less than the validation minimum." + validation_upper_limit = validation_params['EUI'][@opthash.subopts[:units]][building_type]['max'] + validation_lower_limit = validation_params['EUI'][@opthash.subopts[:units]][building_type]['min'] + if feature_eui_value > validation_upper_limit + puts "\nFeature #{File.basename(feature_path)} (#{building_type}) EUI of #{feature_eui_value.round(2)} #{unit_value} " \ + "is greater than the validation maximum of #{validation_upper_limit}." + elsif feature_eui_value < validation_lower_limit + puts "\nFeature #{File.basename(feature_path)} (#{building_type}) EUI of #{feature_eui_value.round(2)} #{unit_value} " \ + "is less than the validation minimum of #{validation_lower_limit}." else - puts "\nFeature #{File.basename(feature_path)} (#{building_type}) EUI of #{feature_eui_value.round(2)} #{unit_value} is within bounds set by #{validation_file_name}." + puts "\nFeature #{File.basename(feature_path)} (#{building_type}) EUI of #{feature_eui_value.round(2)} #{unit_value} " \ + "is within bounds set by #{validation_file_name} (#{validation_lower_limit} - #{validation_upper_limit})." end end end diff --git a/lib/uo_cli/version.rb b/lib/uo_cli/version.rb index 5ab336c9..3a3a4cf4 100644 --- a/lib/uo_cli/version.rb +++ b/lib/uo_cli/version.rb @@ -40,6 +40,6 @@ module URBANopt module CLI - VERSION = '0.9.1'.freeze + VERSION = '0.9.2'.freeze end end diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index aae48d39..00000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -geojson-modelica-translator~=0.2.2 -urbanopt-ditto-reader~=0.3.8 diff --git a/spec/spec_files/two_building_flexible_hot_water.csv b/spec/spec_files/two_building_flexible_hot_water.csv new file mode 100644 index 00000000..fb6eaaf7 --- /dev/null +++ b/spec/spec_files/two_building_flexible_hot_water.csv @@ -0,0 +1,3 @@ +Feature Id,Feature Name,Mapper Class,REopt Assumptions +2,Restaurant 1,URBANopt::Scenario::FlexibleHotWaterMapper,multiPV_assumptions.json +5,District Office 1,URBANopt::Scenario::FlexibleHotWaterMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_floorspace.csv b/spec/spec_files/two_building_floorspace.csv index 7b0a929f..0a70ad50 100644 --- a/spec/spec_files/two_building_floorspace.csv +++ b/spec/spec_files/two_building_floorspace.csv @@ -1,3 +1,3 @@ Feature Id,Feature Name,Mapper Class,REopt Assumptions -5,District Office 1,URBANopt::Scenario::FloorspaceMapper,multiPV_assumptions.json +5,Restaurant 14,URBANopt::Scenario::FloorspaceMapper,multiPV_assumptions.json 7,Office 1,URBANopt::Scenario::FloorspaceMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_res.csv b/spec/spec_files/two_building_res.csv index 640c8b33..535cb596 100644 --- a/spec/spec_files/two_building_res.csv +++ b/spec/spec_files/two_building_res.csv @@ -1,3 +1,3 @@ Feature Id,Feature Name,Mapper Class,REopt Assumptions -5,District Office 1,URBANopt::Scenario::BaselineMapper,multiPV_assumptions.json +5,Restaurant 14,URBANopt::Scenario::BaselineMapper,multiPV_assumptions.json 16,Residential 3,URBANopt::Scenario::BaselineMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_res_chilled_water_scenario.csv b/spec/spec_files/two_building_res_chilled_water_scenario.csv new file mode 100644 index 00000000..fffb6692 
--- /dev/null +++ b/spec/spec_files/two_building_res_chilled_water_scenario.csv @@ -0,0 +1,3 @@ +Feature Id,Feature Name,Mapper Class,REopt Assumptions +5,Restaurant 14,URBANopt::Scenario::ChilledWaterStorageMapper,multiPV_assumptions.json +16,Residential 3,URBANopt::Scenario::ChilledWaterStorageMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_res_peak_hours_mel_reduction.csv b/spec/spec_files/two_building_res_peak_hours_mel_reduction.csv new file mode 100644 index 00000000..e58dd8ee --- /dev/null +++ b/spec/spec_files/two_building_res_peak_hours_mel_reduction.csv @@ -0,0 +1,3 @@ +Feature Id,Feature Name,Mapper Class,REopt Assumptions +5,Restaurant 14,URBANopt::Scenario::PeakHoursMelsSheddingMapper,multiPV_assumptions.json +16,Residential 3,URBANopt::Scenario::PeakHoursMelsSheddingMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_res_stat_adjustment.csv b/spec/spec_files/two_building_res_stat_adjustment.csv new file mode 100644 index 00000000..926c46d2 --- /dev/null +++ b/spec/spec_files/two_building_res_stat_adjustment.csv @@ -0,0 +1,3 @@ +Feature Id,Feature Name,Mapper Class,REopt Assumptions +5,Restaurant 14,URBANopt::Scenario::PeakHoursThermostatAdjustMapper,multiPV_assumptions.json +16,Residential 3,URBANopt::Scenario::PeakHoursThermostatAdjustMapper,multiPV_assumptions.json diff --git a/spec/spec_files/two_building_thermal_storage_scenario.csv b/spec/spec_files/two_building_thermal_storage_scenario.csv new file mode 100644 index 00000000..7e5038a1 --- /dev/null +++ b/spec/spec_files/two_building_thermal_storage_scenario.csv @@ -0,0 +1,3 @@ +Feature Id,Feature Name,Mapper Class,REopt Assumptions +1,Mixed_use 1,URBANopt::Scenario::ThermalStorageMapper,multiPV_assumptions.json +12,Mall 1,URBANopt::Scenario::ThermalStorageMapper,multiPV_assumptions.json diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index e4d4e2d8..75a12949 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -38,11 +38,21 @@ # OF THE POSSIBILITY OF SUCH DAMAGE. # ********************************************************************************* +# require 'simplecov' +# require 'simplecov-lcov' + +# SimpleCov::Formatter::LcovFormatter.config.report_with_single_file = true +# SimpleCov.formatter = SimpleCov::Formatter::LcovFormatter +# # Don't consider the spec folder for test coverage reporting (inside the do/end loop) +# SimpleCov.start do +# add_filter '/spec/' +# end + require 'bundler/setup' RSpec.configure do |config| # Recording test status enables flags like --only-failures and --next-failure - config.example_status_persistence_file_path = 'spec/.rspec_status' + config.example_status_persistence_file_path = '.rspec_status' # Disable RSpec exposing methods globally on `Module` and `main` config.disable_monkey_patching! 
diff --git a/spec/uo_cli_spec.rb b/spec/uo_cli_spec.rb index 42071895..93c9d710 100644 --- a/spec/uo_cli_spec.rb +++ b/spec/uo_cli_spec.rb @@ -41,26 +41,33 @@ require 'json' RSpec.describe URBANopt::CLI do - test_directory = File.join('spec', 'test_directory') - test_directory_res = File.join('spec', 'test_directory_res') - test_directory_elec = File.join('spec', 'test_directory_elec') - test_directory_disco = File.join('spec', 'test_directory_disco') - test_directory_pv = File.join('spec', 'test_directory_pv') - test_scenario = File.join(test_directory, 'two_building_scenario.csv') - test_scenario_res = File.join(test_directory_res, 'two_building_res.csv') - test_reopt_scenario = File.join(test_directory_pv, 'REopt_scenario.csv') - test_scenario_pv = File.join(test_directory_pv, 'two_building_scenario.csv') - test_scenario_elec = File.join(test_directory_elec, 'electrical_scenario.csv') - test_scenario_disco = File.join(test_directory_disco, 'electrical_scenario.csv') - test_ev_scenario = File.join(test_directory, 'two_building_ev_scenario.csv') - test_feature = File.join(test_directory, 'example_project.json') - test_feature_res = File.join(test_directory_res, 'example_project_combined.json') - test_feature_elec = File.join(test_directory_elec, 'example_project_with_electric_network.json') - test_feature_disco = File.join(test_directory_disco, 'example_project_with_electric_network.json') - test_feature_pv = File.join(test_directory_pv, 'example_project_with_PV.json') - test_feature_rnm = File.join(test_directory, 'example_project_with_streets.json') - test_validate_bounds = File.join(test_directory_res, 'out_of_bounds_validation.yaml') - test_reopt_scenario_assumptions_file = File.join(test_directory_pv, 'reopt', 'multiPV_assumptions.json') + example_dir = Pathname(__FILE__).dirname.parent / 'example_files' + spec_dir = Pathname(__FILE__).dirname + test_directory = spec_dir / 'test_directory' + test_directory_res = spec_dir / 'test_directory_res' + test_directory_elec = spec_dir / 'test_directory_elec' + test_directory_disco = spec_dir / 'test_directory_disco' + test_directory_pv = spec_dir / 'test_directory_pv' + test_scenario = test_directory / 'two_building_scenario.csv' + test_scenario_res = test_directory_res / 'two_building_res' + test_reopt_scenario = test_directory_pv / 'REopt_scenario.csv' + test_scenario_pv = test_directory_pv / 'two_building_scenario.csv' + test_scenario_elec = test_directory_elec / 'electrical_scenario.csv' + test_scenario_disco = test_directory_disco / 'electrical_scenario.csv' + test_ev_scenario = test_directory / 'two_building_ev_scenario.csv' + test_scenario_chilled = test_directory_res / 'two_building_chilled.csv' + test_scenario_mels_reduction = test_directory_res / 'two_building_mels_reduction.csv' + test_scenario_stat_adjustment = test_directory_res / 'two_building_stat_adjustment.csv' + test_scenario_flexible_hot_water = test_directory / 'two_building_flexible_hot_water.csv' + test_scenario_thermal_storage = test_directory / 'two_building_thermal_storage.csv' + test_feature = test_directory / 'example_project.json' + test_feature_res = test_directory_res / 'example_project_combined.json' + test_feature_elec = test_directory_elec / 'example_project_with_electric_network.json' + test_feature_disco = test_directory_disco / 'example_project_with_electric_network.json' + test_feature_pv = test_directory_pv / 'example_project_with_PV.json' + test_feature_rnm = test_directory / 'example_project_with_streets.json' + test_validate_bounds = test_directory_res 
/ 'out_of_bounds_validation.yaml' + test_reopt_scenario_assumptions_file = test_directory_pv / 'reopt' / 'multiPV_assumptions.json' call_cli = 'bundle exec uo' # Ensure clean slate for testing @@ -71,6 +78,27 @@ def delete_directory_or_file(dir_or_file) end end + # Look through the workflow file and activate certain measures + # params\ + # +test_dir+:: _path_ Path to the test directory being used + # +measure_name_list+:: _array_ Measure dir_names - present in the named workflow file + # +workflow+:: _string_ Name of the workflow file (found in project_dir/mappers) to search for measures + # + # This function toggles the __SKIP__ argument of measures + def select_measures(test_dir, measure_name_list, workflow = 'base_workflow.osw', skip_setting: false) + # FIXME: More clear argument name than `skip_setting`. It is changing the value of the __SKIP__ argument in the measure. + base_workflow_path = test_dir / 'mappers' / workflow + base_workflow_hash = JSON.parse(File.read(base_workflow_path)) + base_workflow_hash['steps'].each do |measure| + if measure_name_list.include? measure['measure_dir_name'] + measure['arguments']['__SKIP__'] = skip_setting + end + File.open(base_workflow_path, 'w+') do |f| + f << base_workflow_hash.to_json + end + end + end + context 'Admin' do it 'displays the correct version number' do expect { system("#{call_cli} --version") } @@ -91,7 +119,7 @@ def delete_directory_or_file(dir_or_file) end it 'returns graceful error message if dir passed to "create -s" command' do - unless Dir.exist?(File.expand_path(test_directory)) + unless test_directory.exist? system("#{call_cli} create --project-folder #{test_directory}") end expect { system("#{call_cli} create -s #{test_directory}") } @@ -100,7 +128,7 @@ def delete_directory_or_file(dir_or_file) end it 'returns graceful error message if non-json file passed to create -s command' do - unless Dir.exist?(File.expand_path(test_directory)) + unless test_directory.exist? system("#{call_cli} create --project-folder #{test_directory}") end expect { system("#{call_cli} create -s #{test_directory}/validation_schema.yaml") } @@ -109,7 +137,7 @@ def delete_directory_or_file(dir_or_file) end it 'returns graceful error message if invalid json file passed to create -s command' do - unless Dir.exist?(File.expand_path(test_directory)) + unless test_directory.exist? 
system("#{call_cli} create --project-folder #{test_directory}") end expect { system("#{call_cli} create -s #{test_directory}/runner.conf") } @@ -147,59 +175,59 @@ def delete_directory_or_file(dir_or_file) it 'creates an example project directory' do system("#{call_cli} create --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be true - expect(File.exist?(File.join(test_directory, 'mappers/Baseline.rb'))).to be true + expect(test_feature.exist?).to be true + expect((test_directory / 'mappers' / 'Baseline.rb').exist?).to be true end it 'creates an example project directory when create bar geometry method specified' do system("#{call_cli} create --project-folder #{test_directory} --create-bar") - expect(File.exist?(File.join(test_directory, 'mappers/CreateBar.rb'))).to be true - expect(File.exist?(File.join(test_directory, 'mappers/createbar_workflow.osw'))).to be true + expect((test_directory / 'mappers' / 'CreateBar.rb').exist?).to be true + expect((test_directory / 'mappers' / 'createbar_workflow.osw').exist?).to be true end it 'creates an example project directory when floorspace method specified' do system("#{call_cli} create --project-folder #{test_directory} --floorspace") - expect(File.exist?(File.join(test_directory, 'mappers/Floorspace.rb'))).to be true - expect(File.exist?(File.join(test_directory, 'example_floorspace_project.json'))).to be true + expect((test_directory / 'mappers' / 'Floorspace.rb').exist?).to be true + expect((test_directory / 'example_floorspace_project.json').exist?).to be true end it 'creates an example project directory for combined residential and commercial workflow' do system("#{call_cli} create --project-folder #{test_directory_res} --combined") - expect(File.exist?(File.join(test_directory_res, 'mappers/residential'))).to be true - expect(File.exist?(test_feature_res)).to be true - expect(File.exist?(File.join(test_directory_res, 'measures'))).to be true - expect(File.exist?(File.join(test_directory_res, 'resources'))).to be true + expect((test_directory_res / 'mappers' / 'residential').exist?).to be true + expect(test_feature_res.exist?).to be true + expect((test_directory_res / 'measures').exist?).to be true + expect((test_directory_res / 'resources').exist?).to be true end it 'creates an example project directory with electrical network properties' do system("#{call_cli} create --project-folder #{test_directory_elec} --electric") - expect(File.exist?(test_feature_elec)).to be true + expect(test_feature_elec.exist?).to be true end it 'creates an example project directory with electrical network properties and disco workflow' do system("#{call_cli} create --project-folder #{test_directory_disco} --disco") - expect(File.exist?(test_feature_disco)).to be true + expect(test_feature_disco.exist?).to be true end it 'creates an example project directory with PV' do system("#{call_cli} create --project-folder #{test_directory_pv} --photovoltaic") - expect(File.exist?(test_feature_pv)).to be true + expect(test_feature_pv.exist?).to be true end it 'creates an example project directory for rnm workflow' do system("#{call_cli} create --project-folder #{test_directory} --streets") - expect(File.exist?(test_feature_rnm)).to be true + expect(test_feature_rnm.exist?).to be true end it 'creates an empty project directory' do system("#{call_cli} create --empty --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be false - expect(File.exist?(File.join(test_directory, 'mappers', 'Baseline.rb'))).to be true + 
expect(test_feature.exist?).to be false + expect((test_directory / 'mappers' / 'Baseline.rb').exist?).to be true end it 'does not overwrite a project directory without --overwrite' do system("#{call_cli} create --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be true + expect(test_feature.exist?).to be true expect { system("#{call_cli} create --project-folder #{test_directory}") } .to output(a_string_including('already a directory here')) .to_stderr_from_any_process @@ -207,28 +235,31 @@ def delete_directory_or_file(dir_or_file) it 'overwrites a project directory with --overwrite' do system("#{call_cli} create --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be true + expect(test_feature.exist?).to be true expect { system("#{call_cli} create --overwrite --project-folder #{test_directory}") } .to output(a_string_including('Overwriting')) .to_stdout_from_any_process - expect(File.exist?(test_feature)).to be true + expect(test_feature.exist?).to be true end it 'overwrites an existing project directory with an empty directory' do system("#{call_cli} create --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be true + expect(test_feature.exist?).to be true system("#{call_cli} create --empty --overwrite --project-folder #{test_directory}") - expect(File.exist?(test_feature)).to be false - expect(File.exist?(File.join(test_directory, 'mappers', 'Baseline.rb'))).to be true + expect(test_feature.exist?).to be false + expect((test_directory / 'mappers' / 'Baseline.rb').exist?).to be true end it 'sets num_parallel on project creation with env var' do + orig_env_val = ENV['UO_NUM_PARALLEL'] if ENV['UO_NUM_PARALLEL'] ENV['UO_NUM_PARALLEL'] = '3' expect(ENV['UO_NUM_PARALLEL']).to eq('3') system("#{call_cli} create --project-folder #{test_directory}") - runner_file_path = File.join(test_directory, 'runner.conf') + runner_file_path = test_directory / 'runner.conf' runner_conf_hash = JSON.parse(File.read(runner_file_path)) expect(runner_conf_hash['num_parallel']).to eq(3) + # Reset back to original value after test completion + ENV['UO_NUM_PARALLEL'] = orig_env_val end end @@ -239,16 +270,16 @@ def delete_directory_or_file(dir_or_file) end it 'creates a scenario file from a feature file' do - expect(File.exist?(File.join(test_directory, 'baseline_scenario.csv'))).to be false + expect((test_directory / 'baseline_scenario.csv').exist?).to be false system("#{call_cli} create --scenario-file #{test_feature}") - expect(File.exist?(File.join(test_directory, 'baseline_scenario.csv'))).to be true - expect(File.exist?(File.join(test_directory, 'evcharging_scenario.csv'))).to be true + expect((test_directory / 'baseline_scenario.csv').exist?).to be true + expect((test_directory / 'evcharging_scenario.csv').exist?).to be true end it 'creates a scenario file for a single feature from a feature file' do - expect(File.exist?(File.join(test_directory, 'baseline_scenario-2.csv'))).to be false + expect((test_directory / 'baseline_scenario-2.csv').exist?).to be false system("#{call_cli} create --scenario-file #{test_feature} --single-feature 2") - expect(File.exist?(File.join(test_directory, 'baseline_scenario-2.csv'))).to be true + expect((test_directory / 'baseline_scenario-2.csv').exist?).to be true end end @@ -263,29 +294,29 @@ def delete_directory_or_file(dir_or_file) end it 'can update project directory' do - system("#{call_cli} update --existing-project-folder #{test_directory} --new-project-directory #{File.join('spec', 
'new_test_directory')}") - expect(Dir.exist?(File.join('spec', 'new_test_directory', 'mappers'))).to be true - expect(File.exist?(File.join('spec', 'new_test_directory', 'example_project.json'))).to be true + system("#{call_cli} update --existing-project-folder #{test_directory} --new-project-directory #{spec_dir / 'new_test_directory'}") + expect((spec_dir / 'new_test_directory' / 'mappers').exist?).to be true + expect((spec_dir / 'new_test_directory' / 'example_project.json').exist?).to be true - system("#{call_cli} update --existing-project-folder #{test_directory_res} --new-project-directory #{File.join('spec', 'new_test_directory_resi')}") - expect(Dir.exist?(File.join('spec', 'new_test_directory_resi', 'mappers', 'residential'))).to be true + system("#{call_cli} update --existing-project-folder #{test_directory_res} --new-project-directory #{spec_dir / 'new_test_directory_resi'}") + expect((spec_dir / 'new_test_directory_resi' / 'mappers' / 'residential').exist?).to be true - system("#{call_cli} update --existing-project-folder #{test_directory_elec} --new-project-directory #{File.join('spec', 'new_test_directory_ele')}") - expect(Dir.exist?(File.join('spec', 'new_test_directory_ele', 'opendss'))).to be true + system("#{call_cli} update --existing-project-folder #{test_directory_elec} --new-project-directory #{spec_dir / 'new_test_directory_ele'}") + expect((spec_dir / 'new_test_directory_ele' / 'opendss').exist?).to be true - delete_directory_or_file(File.join('spec', 'new_test_directory')) - delete_directory_or_file(File.join('spec', 'new_test_directory_resi')) - delete_directory_or_file(File.join('spec', 'new_test_directory_ele')) + delete_directory_or_file(spec_dir / 'new_test_directory') + delete_directory_or_file(spec_dir / 'new_test_directory_resi') + delete_directory_or_file(spec_dir / 'new_test_directory_ele') end end context 'Install python dependencies' do it 'successfully installs python and dependencies' do - config = File.join('example_files', 'python_deps', 'config.json') - FileUtils.rm_rf(config) if File.exist?(config) + config = example_dir / 'python_deps' / 'config.json' + FileUtils.rm_rf(config) if config.exist? 
system("#{call_cli} install_python") - python_config = File.join('example_files', 'python_deps', 'python_config.json') - expect(File.exist?(python_config)).to be true + python_config = example_dir / 'python_deps' / 'python_config.json' + expect(python_config.exist?).to be true configs = JSON.parse(File.read(python_config)) expect(configs['python_path']).not_to be_falsey @@ -311,66 +342,149 @@ def delete_directory_or_file(dir_or_file) it 'runs a 2 building scenario using default geometry method' do # Use a ScenarioFile with only 2 buildings to reduce test time - system("cp #{File.join('spec', 'spec_files', 'two_building_scenario.csv')} #{test_scenario}") + system("cp #{spec_dir / 'spec_files' / 'two_building_scenario.csv'} #{test_scenario}") system("#{call_cli} run --scenario #{test_scenario} --feature #{test_feature}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', '2', 'failed.job'))).to be false - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', '2', 'finished.job'))).to be true - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', '3', 'finished.job'))).to be false + expect((test_directory / 'run' / 'two_building_scenario' / '2' / 'failed.job').exist?).to be false + expect((test_directory / 'run' / 'two_building_scenario' / '2' / 'finished.job').exist?).to be true + expect((test_directory / 'run' / 'two_building_scenario' / '3' / 'finished.job').exist?).to be false + end + + it 'runs a chilled water scenario with residential and commercial buildings' do + # Use a ScenarioFile with only 2 buildings to reduce test time + system("cp #{spec_dir / 'spec_files' / 'two_building_res_chilled_water_scenario.csv'} #{test_scenario_chilled}") + # Include the chilled water mapper file + system("cp #{example_dir / 'mappers' / 'ChilledWaterStorage.rb'} #{test_directory_res / 'mappers' / 'ChilledWaterStorage.rb'}") + # modify the workflow file to include chilled water + additional_measures = ['openstudio_results', 'add_chilled_water_storage_tank'] # 'BuildResidentialModel', + select_measures(test_directory_res, additional_measures) + # Run the residential project with the chilled water measure included in the workflow + system("#{call_cli} run --scenario #{test_scenario_chilled} --feature #{test_feature_res}") + # Turn off the measures activated specifically for this test + select_measures(test_directory_res, additional_measures, skip_setting: true) + expect((test_directory_res / 'run' / 'two_building_chilled' / '5' / 'finished.job').exist?).to be true + expect((test_directory_res / 'run' / 'two_building_chilled' / '16' / 'finished.job').exist?).to be true + end + + it 'runs a peak-hours MEL reduction scenario with residential and commercial buildings' do + # Use a ScenarioFile with only 2 buildings to reduce test time + system("cp #{spec_dir / 'spec_files' / 'two_building_res_peak_hours_mel_reduction.csv'} #{test_scenario_mels_reduction}") + # Include the MEL reduction mapper file + system("cp #{example_dir / 'mappers' / 'PeakHoursMelsShedding.rb'} #{test_directory_res / 'mappers' / 'PeakHoursMelsShedding.rb'}") + # modify the workflow file to include MEL reduction + additional_measures = ['openstudio_results', 'reduce_epd_by_percentage_for_peak_hours'] # 'BuildResidentialModel', + select_measures(test_directory_res, additional_measures) + # Run the residential project with the MEL reduction measure included in the workflow + system("#{call_cli} run --scenario #{test_scenario_mels_reduction} --feature 
#{test_feature_res}") + # Turn off the measures activated specifically for this test + select_measures(test_directory_res, additional_measures, skip_setting: true) + expect((test_directory_res / 'run' / 'two_building_mels_reduction' / '5' / 'finished.job').exist?).to be true + expect((test_directory_res / 'run' / 'two_building_mels_reduction' / '16' / 'finished.job').exist?).to be true + end + + it 'runs a peak-hours thermostat adjustment scenario with residential and commercial buildings' do + # Use a ScenarioFile with only 2 buildings to reduce test time + system("cp #{spec_dir / 'spec_files' / 'two_building_res_stat_adjustment.csv'} #{test_scenario_stat_adjustment}") + # Include the thermostat adjustment mapper file + system("cp #{example_dir / 'mappers' / 'PeakHoursThermostatAdjust.rb'} #{test_directory_res / 'mappers' / 'PeakHoursThermostatAdjust.rb'}") + # modify the workflow file to include thermostat adjustment + additional_measures = ['openstudio_results', 'AdjustThermostatSetpointsByDegreesForPeakHours'] # 'BuildResidentialModel', + select_measures(test_directory_res, additional_measures) + # Run the residential project with the thermostat adjustment measure included in the workflow + system("#{call_cli} run --scenario #{test_scenario_stat_adjustment} --feature #{test_feature_res}") + # Turn off the measures activated specifically for this test + select_measures(test_directory_res, additional_measures, skip_setting: true) + expect((test_directory_res / 'run' / 'two_building_stat_adjustment' / '5' / 'finished.job').exist?).to be true + expect((test_directory_res / 'run' / 'two_building_stat_adjustment' / '16' / 'finished.job').exist?).to be true + end + + it 'runs a flexible hot water scenario' do + # https://github.com/NREL/openstudio-load-flexibility-measures-gem/tree/master/lib/measures/add_hpwh + # Use a ScenarioFile with only 2 buildings to reduce test time + system("cp #{spec_dir / 'spec_files' / 'two_building_flexible_hot_water.csv'} #{test_scenario_flexible_hot_water}") + # Include the flexible hot water mapper file + system("cp #{example_dir / 'mappers' / 'FlexibleHotWater.rb'} #{test_directory / 'mappers' / 'FlexibleHotWater.rb'}") + # modify the workflow file to include flexible hot water + additional_measures = ['openstudio_results', 'add_hpwh'] # 'BuildResidentialModel', + select_measures(test_directory, additional_measures) + # Run the residential project with the flexible hot water measure included in the workflow + system("#{call_cli} run --scenario #{test_scenario_flexible_hot_water} --feature #{test_feature}") + # Turn off the measures activated specifically for this test + select_measures(test_directory, additional_measures, skip_setting: true) + expect((test_directory / 'run' / 'two_building_flexible_hot_water' / '5' / 'finished.job').exist?).to be true + expect((test_directory / 'run' / 'two_building_flexible_hot_water' / '2' / 'finished.job').exist?).to be true + end + + it 'runs a ice-storage scenario' do + # https://github.com/NREL/openstudio-load-flexibility-measures-gem/tree/master/lib/measures/add_central_ice_storage + # Use a ScenarioFile with only 2 buildings to reduce test time + system("cp #{spec_dir / 'spec_files' / 'two_building_thermal_storage_scenario.csv'} #{test_scenario_thermal_storage}") + # Include the thermal storage mapper file + system("cp #{example_dir / 'mappers' / 'ThermalStorage.rb'} #{test_directory / 'mappers' / 'ThermalStorage.rb'}") + # modify the workflow file to include thermal storage + additional_measures = 
['openstudio_results', 'add_central_ice_storage']
+      select_measures(test_directory, additional_measures)
+      # Run the project with the thermal storage measures included in the workflow
+      system("#{call_cli} run --scenario #{test_scenario_thermal_storage} --feature #{test_feature}")
+      # Turn off the measures activated specifically for this test
+      select_measures(test_directory, additional_measures, skip_setting: true)
+      expect((test_directory / 'run' / 'two_building_thermal_storage' / '1' / 'finished.job').exist?).to be true
+      expect((test_directory / 'run' / 'two_building_thermal_storage' / '12' / 'finished.job').exist?).to be true
     end
 
     it 'runs a 2 building scenario with residential and commercial buildings' do
-      system("cp #{File.join('spec', 'spec_files', 'two_building_res.csv')} #{test_scenario_res}")
+      system("cp #{spec_dir / 'spec_files' / 'two_building_res.csv'} #{test_scenario_res}")
       system("#{call_cli} run --scenario #{test_scenario_res} --feature #{test_feature_res}")
-      expect(File.exist?(File.join(test_directory_res, 'run', 'two_building_res', '5', 'finished.job'))).to be true
-      expect(File.exist?(File.join(test_directory_res, 'run', 'two_building_res', '16', 'finished.job'))).to be true
+      expect((test_directory_res / 'run' / 'two_building_res' / '5' / 'finished.job').exist?).to be true
+      expect((test_directory_res / 'run' / 'two_building_res' / '16' / 'finished.job').exist?).to be true
     end
 
     it 'runs a 2 building scenario using create bar geometry method' do
       # Copy create bar specific files
-      system("cp #{File.join('example_files', 'mappers', 'CreateBar.rb')} #{File.join(test_directory, 'mappers', 'CreateBar.rb')}")
-      system("cp #{File.join('example_files', 'mappers', 'createbar_workflow.osw')} #{File.join(test_directory, 'mappers', 'createbar_workflow.osw')}")
-      system("cp #{File.join('spec', 'spec_files', 'two_building_create_bar.csv')} #{File.join(test_directory, 'two_building_create_bar.csv')}")
-      system("#{call_cli} run --scenario #{File.join(test_directory, 'two_building_create_bar.csv')} --feature #{test_feature}")
-      expect(File.exist?(File.join(test_directory, 'run', 'two_building_create_bar', '2', 'finished.job'))).to be true
+      system("cp #{example_dir / 'mappers' / 'CreateBar.rb'} #{test_directory / 'mappers' / 'CreateBar.rb'}")
+      system("cp #{example_dir / 'mappers' / 'createbar_workflow.osw'} #{test_directory / 'mappers' / 'createbar_workflow.osw'}")
+      system("cp #{spec_dir / 'spec_files' / 'two_building_create_bar.csv'} #{test_directory / 'two_building_create_bar.csv'}")
+      system("#{call_cli} run --scenario #{test_directory / 'two_building_create_bar.csv'} --feature #{test_feature}")
+      expect((test_directory / 'run' / 'two_building_create_bar' / '2' / 'finished.job').exist?).to be true
     end
 
     it 'runs a 2 building scenario using floorspace geometry method' do
       # Copy floorspace specific files
-      system("cp #{File.join('example_files', 'mappers', 'Floorspace.rb')} #{File.join(test_directory, 'mappers', 'Floorspace.rb')}")
-      system("cp #{File.join('example_files', 'mappers', 'floorspace_workflow.osw')} #{File.join(test_directory, 'mappers', 'floorspace_workflow.osw')}")
-      system("cp #{File.join('example_files', 'osm_building', '7_floorspace.json')} #{File.join(test_directory, 'osm_building', '7_floorspace.json')}")
-      system("cp #{File.join('example_files', 'osm_building', '7_floorspace.osm')} #{File.join(test_directory, 'osm_building', '7_floorspace.osm')}")
-      system("cp #{File.join('example_files', 'example_floorspace_project.json')} #{File.join(test_directory,
'example_floorspace_project.json')}") - system("cp #{File.join('spec', 'spec_files', 'two_building_floorspace.csv')} #{File.join(test_directory, 'two_building_floorspace.csv')}") - system("#{call_cli} run --scenario #{File.join(test_directory, 'two_building_floorspace.csv')} --feature #{File.join('../example_files/example_floorspace_project.json')}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_floorspace', '5', 'finished.job'))).to be true - expect(File.exist?(File.join(test_directory, 'run', 'two_building_floorspace', '7', 'finished.job'))).to be true + system("cp #{example_dir / 'mappers' / 'Floorspace.rb'} #{test_directory / 'mappers' / 'Floorspace.rb'}") + system("cp #{example_dir / 'mappers' / 'floorspace_workflow.osw'} #{test_directory / 'mappers' / 'floorspace_workflow.osw'}") + system("cp #{example_dir / 'osm_building' / '7_floorspace.json'} #{test_directory / 'osm_building' / '7_floorspace.json'}") + system("cp #{example_dir / 'osm_building' / '7_floorspace.osm'} #{test_directory / 'osm_building' / '7_floorspace.osm'}") + system("cp #{example_dir / 'example_floorspace_project.json'} #{test_directory / 'example_floorspace_project.json'}") + system("cp #{spec_dir / 'spec_files' / 'two_building_floorspace.csv'} #{test_directory / 'two_building_floorspace.csv'}") + expect((test_directory / 'osm_building' / '7_floorspace.osm').exist?).to be true + system("#{call_cli} run --scenario #{test_directory / 'two_building_floorspace.csv'} --feature #{test_directory / 'example_floorspace_project.json'}") + expect((test_directory / 'run' / 'two_building_floorspace' / '5' / 'finished.job').exist?).to be true + expect((test_directory / 'run' / 'two_building_floorspace' / '7' / 'finished.job').exist?).to be true end it 'runs an ev-charging scenario' do # copy ev-charging specific files - system("cp #{File.join('spec', 'spec_files', 'two_building_ev_scenario.csv')} #{test_ev_scenario}") + system("cp #{spec_dir / 'spec_files' / 'two_building_ev_scenario.csv'} #{test_ev_scenario}") system("#{call_cli} run --scenario #{test_ev_scenario} --feature #{test_feature}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_ev_scenario', '5', 'finished.job'))).to be true - expect(File.exist?(File.join(test_directory, 'run', 'two_building_ev_scenario', '2', 'finished.job'))).to be true + expect((test_directory / 'run' / 'two_building_ev_scenario' / '5' / 'finished.job').exist?).to be true + expect((test_directory / 'run' / 'two_building_ev_scenario' / '2' / 'finished.job').exist?).to be true end it 'runs an electrical network scenario' do - system("cp #{File.join('spec', 'spec_files', 'electrical_scenario.csv')} #{test_scenario_elec}") + system("cp #{spec_dir / 'spec_files' / 'electrical_scenario.csv'} #{test_scenario_elec}") system("#{call_cli} run --scenario #{test_scenario_elec} --feature #{test_feature_elec}") - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', '13', 'finished.job'))).to be true + expect((test_directory_elec / 'run' / 'electrical_scenario' / '13' / 'finished.job').exist?).to be true end it 'runs a PV scenario when called with reopt' do - system("cp #{File.join('spec', 'spec_files', 'REopt_scenario.csv')} #{test_reopt_scenario}") + system("cp #{spec_dir / 'spec_files' / 'REopt_scenario.csv'} #{test_reopt_scenario}") # Copy in reopt folder - system("cp -R #{File.join('spec', 'spec_files', 'reopt')} #{File.join(test_directory_pv, 'reopt')}") + system("cp -R #{spec_dir / 'spec_files' / 'reopt'} #{test_directory_pv / 'reopt'}") 
system("#{call_cli} run --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") - expect(File.exist?(File.join(test_directory_pv, 'reopt'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'reopt/base_assumptions.json'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', '5', 'finished.job'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', '2', 'finished.job'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', '3', 'finished.job'))).to be false + expect((test_directory_pv / 'reopt').exist?).to be true + expect((test_directory_pv / 'reopt' / 'base_assumptions.json').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / '5' / 'finished.job').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / '2' / 'finished.job').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / '3' / 'finished.job').exist?).to be false end it 'post-processor closes gracefully if given an invalid type' do @@ -395,144 +509,158 @@ def delete_directory_or_file(dir_or_file) end it 'default post-processes a scenario' do - test_scenario_report = File.join(test_directory, 'run', 'two_building_scenario', 'default_scenario_report.csv') + # This test requires the 'runs a 2 building scenario using default geometry method' be run first + test_scenario_report = test_directory / 'run' / 'two_building_scenario' / 'default_scenario_report.csv' system("#{call_cli} process --default --scenario #{test_scenario} --feature #{test_feature}") expect(`wc -l < #{test_scenario_report}`.to_i).to be > 2 - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'process_status.json'))).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'process_status.json').exist?).to be true end it 'successfully runs the rnm workflow' do + # This test requires the 'runs a 2 building scenario using default geometry method' be run first # copy featurefile in dir rnm_file = 'example_project_with_streets.json' - system("cp #{File.join('spec', 'spec_files', rnm_file)} #{File.join(test_directory, rnm_file)}") + system("cp #{spec_dir / 'spec_files' / rnm_file} #{test_directory / rnm_file}") # call rnm - test_rnm_file = File.join(test_directory, rnm_file) + test_rnm_file = test_directory / rnm_file system("#{call_cli} rnm --scenario #{test_scenario} --feature #{test_rnm_file}") # check that rnm inputs and outputs were created - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'rnm-us', 'inputs.zip'))).to be true - expect(Dir.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'rnm-us', 'results'))).to be true - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'scenario_report_rnm.json'))).to be true - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'feature_file_rnm.json'))).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'rnm-us' / 'inputs.zip').exist?).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'rnm-us' / 'results').exist?).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'scenario_report_rnm.json').exist?).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'feature_file_rnm.json').exist?).to be true end it 'successfully gets results from the opendss cli' do + # This test requires the 'runs an electrical network scenario' be run first 
system("#{call_cli} process --default --scenario #{test_scenario_elec} --feature #{test_feature_elec}") system("#{call_cli} opendss --scenario #{test_scenario_elec} --feature #{test_feature_elec} --start-date 2017/01/15 --start-time 01:00:00 --end-date 2017/01/16 --end-time 00:00:00") - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', 'opendss', 'profiles', 'load_1.csv'))).to be true + expect((test_directory_elec / 'run' / 'electrical_scenario' / 'opendss' / 'profiles' / 'load_1.csv').exist?).to be true expect { system("#{call_cli} opendss --scenario #{test_scenario_elec} --feature #{test_feature_elec} --start-date 2017/01/15 --start-time 01:00:00 --end-date 2017/01/16 --end-time 00:00:00 --upgrade") } .to output(a_string_including('Upgrading undersized transformers:')) .to_stdout_from_any_process - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', 'opendss', 'profiles', 'load_1.csv'))).to be true + expect((test_directory_elec / 'run' / 'electrical_scenario' / 'opendss' / 'profiles' / 'load_1.csv').exist?).to be true end it 'successfully runs disco simulation' do + # This test requires the 'runs an electrical network scenario' be run first system("#{call_cli} disco --scenario #{test_scenario_elec} --feature #{test_feature_elec}") - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', 'disco'))).to be true + expect((test_directory_elec / 'run' / 'electrical_scenario' / 'disco').exist?).to be true end it 'saves post-process output as a database file' do - db_filename = File.join(test_directory, 'run', 'two_building_scenario', 'default_scenario_report.db') + # This test requires the 'runs a 2 building scenario using default geometry method' be run first + db_filename = test_directory / 'run' / 'two_building_scenario' / 'default_scenario_report.db' system("#{call_cli} process --default --with-database --scenario #{test_scenario} --feature #{test_feature}") expect(`wc -l < #{db_filename}`.to_i).to be > 20 - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'process_status.json'))).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'process_status.json').exist?).to be true end it 'reopt post-processes a scenario and visualize' do + # This test requires the 'runs a PV scenario when called with reopt' be run first system("#{call_cli} process --default --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") system("#{call_cli} process --reopt-scenario --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'scenario_optimization.json'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'process_status.json'))).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'scenario_optimization.json').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'process_status.json').exist?).to be true # and visualize system("#{call_cli} visualize --feature #{test_feature_pv}") - expect(File.exist?(File.join(test_directory_pv, 'run', 'scenario_comparison.html'))).to be true + expect((test_directory_pv / 'run' / 'scenario_comparison.html').exist?).to be true end it 'reopt post-processes a scenario with specified scenario assumptions file' do + # This test requires the 'runs a PV scenario when called with reopt' be run first system("#{call_cli} process --default --scenario #{test_reopt_scenario} --feature 
#{test_feature_pv}") expect { system("#{call_cli} process --reopt-scenario -a #{test_reopt_scenario_assumptions_file} --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") } .to output(a_string_including('multiPV_assumptions.json')) .to_stdout_from_any_process - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'scenario_optimization.json'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'process_status.json'))).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'scenario_optimization.json').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'process_status.json').exist?).to be true end it 'reopt post-processes a scenario with resilience reporting' do + # This test requires the 'runs a PV scenario when called with reopt' be run first system("#{call_cli} process --default --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") system("#{call_cli} process --reopt-scenario --reopt-resilience --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'scenario_optimization.json'))).to be true - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'process_status.json'))).to be true - path_to_resilience_report_file = File.join(test_directory_pv, 'run', 'reopt_scenario', 'reopt', 'scenario_report_reopt_scenario_reopt_run_resilience.json') + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'scenario_optimization.json').exist?).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'process_status.json').exist?).to be true + # path_to_resilience_report_file = test_directory_pv / 'run' / 'reopt_scenario' / 'reopt' / 'scenario_report_reopt_scenario_reopt_run_resilience.json' end it 'reopt post-processes each feature and visualize' do + # This test requires the 'runs a PV scenario when called with reopt' be run first system("#{call_cli} process --default --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") system("#{call_cli} process --reopt-feature --scenario #{test_reopt_scenario} --feature #{test_feature_pv}") - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'feature_optimization.csv'))).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'feature_optimization.csv').exist?).to be true # and visualize system("#{call_cli} visualize --scenario #{test_reopt_scenario}") - expect(File.exist?(File.join(test_directory_pv, 'run', 'reopt_scenario', 'feature_comparison.html'))).to be true + expect((test_directory_pv / 'run' / 'reopt_scenario' / 'feature_comparison.html').exist?).to be true end it 'opendss post-processes a scenario' do - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', '2', 'feature_reports', 'default_feature_report_opendss.csv'))).to be false + # This test requires the 'successfully gets results from the opendss cli' be run first + expect((test_directory_elec / 'run' / 'electrical_scenario' / '2' / 'feature_reports' / 'default_feature_report_opendss.csv').exist?).to be false system("#{call_cli} process --opendss --scenario #{test_scenario_elec} --feature #{test_feature_elec}") - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', '2', 'feature_reports', 'default_feature_report_opendss.csv'))).to be true - expect(File.exist?(File.join(test_directory_elec, 'run', 'electrical_scenario', 'process_status.json'))).to be true + 
expect((test_directory_elec / 'run' / 'electrical_scenario' / '2' / 'feature_reports' / 'default_feature_report_opendss.csv').exist?).to be true + expect((test_directory_elec / 'run' / 'electrical_scenario' / 'process_status.json').exist?).to be true end it 'creates scenario visualization for default post processor' do + # This test requires the 'runs a 2 building scenario using default geometry method' be run first # visualizing via the FeatureFile will throw error to stdout (but not crash) if a scenario that uses those features isn't processed first. system("#{call_cli} process --default --scenario #{test_scenario} --feature #{test_feature}") system("#{call_cli} process --default --scenario #{test_ev_scenario} --feature #{test_feature}") system("#{call_cli} visualize --feature #{test_feature}") - expect(File.exist?(File.join(test_directory, 'run', 'scenario_comparison.html'))).to be true + expect((test_directory / 'run' / 'scenario_comparison.html').exist?).to be true end it 'creates feature visualization for default post processor' do + # This test requires the 'runs a 2 building scenario using default geometry method' be run first system("#{call_cli} process --default --scenario #{test_scenario} --feature #{test_feature}") system("#{call_cli} visualize --scenario #{test_scenario}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'feature_comparison.html'))).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'feature_comparison.html').exist?).to be true end it 'ensures viz files are in the project directory' do - if File.exist?(File.join(test_directory, 'visualization', 'input_visualization_feature.html')) - FileUtils.rm_rf(File.join(test_directory, 'visualization', 'input_visualization_feature.html')) + # This test requires the 'runs a 2 building scenario using default geometry method' be run first + if (test_directory / 'visualization' / 'input_visualization_feature.html').exist? + FileUtils.rm_rf(test_directory / 'visualization' / 'input_visualization_feature.html') end - if File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'feature_comparison.html')) - FileUtils.rm_rf(File.join(test_directory, 'run', 'two_building_scenario', 'feature_comparison.html')) + if (test_directory / 'run' / 'two_building_scenario' / 'feature_comparison.html').exist? + FileUtils.rm_rf(test_directory / 'run' / 'two_building_scenario' / 'feature_comparison.html') end - if File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'scenarioData.js')) - FileUtils.rm_rf(File.join(test_directory, 'run', 'two_building_scenario', 'scenarioData.js')) + if (test_directory / 'run' / 'two_building_scenario' / 'scenarioData.js').exist? 
+ FileUtils.rm_rf(test_directory / 'run' / 'two_building_scenario' / 'scenarioData.js') end - expect(File.exist?(File.join(test_directory, 'visualization', 'input_visualization_feature.html'))).to be false - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'feature_comparison.html'))).to be false - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'scenarioData.js'))).to be false + expect((test_directory / 'visualization' / 'input_visualization_feature.html').exist?).to be false + expect((test_directory / 'run' / 'two_building_scenario' / 'feature_comparison.html').exist?).to be false + expect((test_directory / 'run' / 'two_building_scenario' / 'scenarioData.js').exist?).to be false system("#{call_cli} visualize --scenario #{test_scenario}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_scenario', 'feature_comparison.html'))).to be true + expect((test_directory / 'run' / 'two_building_scenario' / 'feature_comparison.html').exist?).to be true end it 'validates eui' do - test_validation_file = File.join(test_directory_res, 'validation_schema.yaml') + # This test requires the 'runs a 2 building scenario with residential and commercial buildings' be run first + test_validation_file = test_directory_res / 'validation_schema.yaml' expect { system("#{call_cli} validate --eui #{test_validation_file} --scenario #{test_scenario_res} --feature #{test_feature_res}") } .to output(a_string_including('is within bounds set by')) .to_stdout_from_any_process - system("cp #{File.join('spec', 'spec_files', 'out_of_bounds_validation.yaml')} #{test_validate_bounds}") + system("cp #{spec_dir / 'spec_files' / 'out_of_bounds_validation.yaml'} #{test_validate_bounds}") expect { system("#{call_cli} validate --eui #{test_validate_bounds} --scenario #{test_scenario_res} --feature #{test_feature_res}") } - .to output(a_string_including('kBtu/ft2/yr is greater than the validation maximum.')) + .to output(a_string_including('kBtu/ft2/yr is greater than the validation maximum')) .to_stdout_from_any_process expect { system("#{call_cli} validate --eui #{test_validate_bounds} --scenario #{test_scenario_res} --feature #{test_feature_res}") } - .to output(a_string_including('is less than the validation minimum.')) + .to output(a_string_including('is less than the validation minimum')) .to_stdout_from_any_process expect { system("#{call_cli} validate --eui #{test_validate_bounds} --scenario #{test_scenario_res} --feature #{test_feature_res} --units SI") } - .to output(a_string_including('kWh/m2/yr is less than the validation minimum.')) + .to output(a_string_including('kWh/m2/yr is less than the validation minimum')) .to_stdout_from_any_process end it 'deletes a scenario' do - expect(File.exist?(File.join(test_directory, 'run', 'two_building_create_bar', '2', 'data_point_out.json'))).to be true - bar_scenario = File.join(test_directory, 'two_building_create_bar.csv') + expect((test_directory / 'run' / 'two_building_create_bar' / '2' / 'data_point_out.json').exist?).to be true + bar_scenario = test_directory / 'two_building_create_bar.csv' system("#{call_cli} delete --scenario #{bar_scenario}") - expect(File.exist?(File.join(test_directory, 'run', 'two_building_create_bar', '2', 'data_point_out.json'))).to be false + expect((test_directory / 'run' / 'two_building_create_bar' / '2' / 'data_point_out.json').exist?).to be false end end end diff --git a/uo_cli.gemspec b/uo_cli.gemspec index 78e9bfdf..fa9719be 100644 --- a/uo_cli.gemspec +++ b/uo_cli.gemspec 
@@ -32,7 +32,7 @@ Gem::Specification.new do |spec| spec.require_paths = ['lib', 'example_files'] spec.required_ruby_version = '~> 2.7.0' - # use specific versions of urbanopt and openstudio dependencies while under heavy development + # use specific versions of urbanopt and openstudio dependencies while under heavy development spec.add_runtime_dependency 'optimist', '~> 3' spec.add_runtime_dependency 'urbanopt-geojson', '~> 0.9.0' spec.add_runtime_dependency 'urbanopt-reopt', '~> 0.9.0' @@ -47,4 +47,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency 'rubocop', '~> 1.15.0' spec.add_development_dependency 'rubocop-checkstyle_formatter', '~> 0.4.0' spec.add_development_dependency 'rubocop-performance', '~> 1.11.3' + spec.add_development_dependency 'simplecov', '~> 0.18.2' + spec.add_development_dependency 'simplecov-lcov', '~> 0.8.0' end
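
Note on the spec refactor above: the tests now build paths with Ruby's Pathname `/` operator and call `.exist?` directly, instead of composing strings with File.join and checking them with File.exist?/Dir.exist?. Below is a minimal Ruby sketch of that pattern; the spec_dir and test_directory names mirror the helpers used in the spec, but their exact definitions are assumed here rather than taken from the diff.

require 'pathname'
require 'json'

# Illustrative stand-ins for the spec's path helpers (assumed, not from the diff):
spec_dir = Pathname.new('spec')
test_directory = spec_dir / 'test_directory'

# Pathname#/ joins segments and returns another Pathname, so existence checks
# chain without File.join/File.exist? wrappers:
baseline_mapper = test_directory / 'mappers' / 'Baseline.rb'
puts baseline_mapper.exist?   # replaces File.exist?(File.join(...))

# A Pathname interpolates into shell commands and works with File.read:
runner_conf = test_directory / 'runner.conf'
puts "cp #{spec_dir / 'spec_files' / 'runner.conf'} #{runner_conf}"
puts JSON.parse(File.read(runner_conf))['num_parallel'] if runner_conf.exist?

The new select_measures helper added to the spec follows the same pattern: it reads mappers/base_workflow.osw from the given project directory, flips the __SKIP__ argument on the named workflow steps, and writes the JSON back, so a test can enable a measure before a run and disable it again afterwards with skip_setting: true.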