From 82f89c3efe2807354ab59af00d6739aca328ed51 Mon Sep 17 00:00:00 2001 From: Matt L <124107509+mattjala@users.noreply.github.com> Date: Wed, 18 Dec 2024 11:52:52 -0600 Subject: [PATCH] Fix dataset creation failure on Ubuntu 24 (#131) * Remove vol-tests submodule * Show HSDS log on CI failure * Fix warnings * Add Ubuntu 22/24 to CI * Fix bad length in snprintf --- .github/workflows/main.yml | 213 ++++++++++++------------------------- .gitmodules | 3 - build_vol_autotools.sh | 6 -- build_vol_cmake.sh | 22 ---- src/rest_vol.c | 23 ++-- src/rest_vol_attr.c | 8 +- src/rest_vol_dataset.c | 25 ++--- src/rest_vol_datatype.c | 4 +- src/rest_vol_group.c | 2 +- src/rest_vol_link.c | 2 +- src/rest_vol_object.c | 2 +- test/vol-tests | 1 - 12 files changed, 97 insertions(+), 214 deletions(-) delete mode 160000 test/vol-tests diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fb6f053a..e2b5ff4c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -27,13 +27,18 @@ env: BUCKET_NAME: hsdstest jobs: - build_and_test_with_autotools: + build_and_test: strategy: fail-fast: false matrix: - os: [ubuntu-latest] + os: [ubuntu-22.04, ubuntu-latest] + build_system: ["autotools", "cmake"] python-version: ["3.10"] - hdf5-branch: ["hdf5_1_14"] + hdf5-branch: ["hdf5_1_14", "develop"] + exclude: + # hdf5 2.0.0+ does not support autotools + - hdf5-branch: "develop" + build_system: "autotools" runs-on: ${{matrix.os}} steps: @@ -50,6 +55,7 @@ jobs: sudo apt-get install libyajl-dev - name: Get Autotools Dependencies + if: matrix.build_system == 'autotools' run: | sudo apt update sudo apt install automake autoconf libtool libtool-bin @@ -59,7 +65,25 @@ jobs: with: path: ${{github.workspace}}/vol-rest - - name: Autotools Configure + Build HDF5 + - name: CMake Configure + Build HDF5 + if: matrix.build_system == 'cmake' + run: | + mkdir ${{github.workspace}}/hdf5/build + cd ./build + cmake \ + -DHDF5_BUILD_HL_LIB=ON \ + -DBUILD_SHARED_LIBS=ON \ + 
-DHDF5_TEST_API=ON \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \ + .. + make + make install + shell: bash + working-directory: ${{github.workspace}}/hdf5 + + - name: Autotools Configure + Build HDF5 + if: matrix.build_system == 'autotools' run: | ./autogen.sh ./configure --prefix=${{github.workspace}}/hdf5install \ @@ -70,21 +94,44 @@ jobs: shell: bash working-directory: ${{github.workspace}}/hdf5 + - name: CMake Configure REST VOL + if: matrix.build_system == 'cmake' + run: | + mkdir ./build + cd ./build + CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \ + -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \ + .. + shell: bash + working-directory: ${{github.workspace}}/vol-rest + - name: Autotools Configure REST VOL + if: matrix.build_system == 'autotools' run: | ./autogen.sh mkdir ${{github.workspace}}/vol-rest/install - CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure --prefix=${{github.workspace}}/vol-rest/install --with-hdf5=${{github.workspace}}/hdf5install + CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure \ + --prefix=${{github.workspace}}/vol-rest/install \ + --with-hdf5=${{github.workspace}}/hdf5install shell: bash working-directory: ${{github.workspace}}/vol-rest - - name: Build + Install REST VOL + - name: Build + Install REST VOL (Autotools) + if: matrix.build_system == 'autotools' run: | - make + make -j make install shell: bash working-directory: ${{github.workspace}}/vol-rest/ + - name: Build + Install REST VOL (CMake) + if: matrix.build_system == 'cmake' + run: | + make -j + make install + shell: bash + working-directory: ${{github.workspace}}/vol-rest/build/ + - uses: actions/checkout@v4 with: repository: HDFGroup/hsds @@ -114,19 +161,12 @@ jobs: cd ${{github.workspace}}/hsds pytest - - name: Install valgrind - run: | - sudo apt update - sudo apt install valgrind - working-directory: ${{ github.workspace }} - # Requests 2.32.0 breaks 
requests-unixsocket, used by HSDS for socket connections - name: Fix requests version run: | pip install requests==2.31.0 - name: Start HSDS - if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}} run: | cd ${{github.workspace}}/hsds mkdir ${{github.workspace}}/hsdsdata && @@ -140,149 +180,34 @@ jobs: working-directory: ${{github.workspace}}/hsds - name: Test HSDS - if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}} run: | python tests/integ/setup_test.py working-directory: ${{github.workspace}}/hsds - - name: Test REST VOL + - name: Test REST VOL (Autotools) + if: matrix.build_system == 'autotools' working-directory: ${{github.workspace}}/vol-rest/ + env: + HDF5_PLUGIN_PATH: ${{github.workspace}}/vol-rest/install/lib run: | - HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/install/lib HDF5_VOL_CONNECTOR=REST ./test/test_rest_vol - - build_and_test_with_cmake: - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.10"] - hdf5-branch: ["hdf5_1_14", "develop"] - - runs-on: ${{matrix.os}} - steps: - - uses: actions/checkout@v4 - with: - repository: HDFGroup/hdf5 - ref: ${{matrix.hdf5-branch}} - path: ${{github.workspace}}/hdf5 - - - name: Get REST VOL dependencies - run: | - sudo apt-get update - sudo apt-get install libcurl4-openssl-dev - sudo apt-get install libyajl-dev - - - name: Get REST VOL - uses: actions/checkout@v4 - with: - path: ${{github.workspace}}/vol-rest + ./test/test_rest_vol - - name: CMake Configure + Build HDF5 - run: | - mkdir ${{github.workspace}}/hdf5/build - cd ./build - cmake \ - -DHDF5_BUILD_HL_LIB=ON \ - -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \ - -DHDF5_TEST_API=ON \ - -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \ - -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \ - -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \ - .. 
- make - make install - shell: bash - working-directory: ${{github.workspace}}/hdf5 - - - name: CMake Configure REST VOL - run: | - mkdir ./build - cd ./build - CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \ - -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \ - .. - shell: bash - working-directory: ${{github.workspace}}/vol-rest - - - name: Build + Install REST VOL - run: | - make - make install - shell: bash - working-directory: ${{github.workspace}}/vol-rest/build - - - uses: actions/checkout@v4 - with: - repository: HDFGroup/hsds - path: ${{github.workspace}}/hsds - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install HSDS dependencies - shell: bash - run: | - python -m pip install --upgrade pip - python -m pip install pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - - name: Install HSDS package - shell: bash - run: | - cd ${{github.workspace}}/hsds - pip install -e . 
- - - name: Run HSDS unit tests - shell: bash - run: | - cd ${{github.workspace}}/hsds - pytest - - - name: Install valgrind - run: | - sudo apt update - sudo apt install valgrind - working-directory: ${{ github.workspace }} - - # Requests 2.32.0 breaks requests-unixsocket, used by HSDS for socket connections - - name: Fix requests version + - name: Test REST VOL (CMake) + if: matrix.build_system == 'cmake' + working-directory: ${{github.workspace}}/vol-rest/build/ + env: + HDF5_PLUGIN_PATH: ${{github.workspace}}/vol-rest/install/lib run: | - pip install requests==2.31.0 + ctest -R "test_rest_vol" -VV - - name: Start HSDS - if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}} - run: | - cd ${{github.workspace}}/hsds - mkdir ${{github.workspace}}/hsdsdata && - mkdir ${{github.workspace}}/hsdsdata/hsdstest && - cp admin/config/groups.default admin/config/groups.txt && - cp admin/config/passwd.default admin/config/passwd.txt && - cp admin/config/groups.default admin/config/groups.txt && - cp admin/config/passwd.default admin/config/passwd.txt - ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 & - sleep 10 + - name: Show HSDS Logs on Fail + if: ${{failure()}} working-directory: ${{github.workspace}}/hsds - - - name: Test HSDS - if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}} - run: | - python tests/integ/setup_test.py - working-directory: ${{github.workspace}}/hsds - - - name: Set HDF5 Plugin path - run: | - echo "HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/build/bin/" >> $GITHUB_ENV - echo "HDF5_VOL_CONNECTOR=REST" >> $GITHUB_ENV - - - name: Test REST VOL - working-directory: ${{github.workspace}}/vol-rest/build/ run: | - valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV + cat hs.log # TODO: Attribute, dataset, link, and testhdf5 tests currently fail # - name: Test REST VOL with API # run: | -# valgrind --leak-check=full -s ctest -R "vol-rest" -VV +# ctest -R "vol-rest" -VV # working-directory: ${{github.workspace}}/hdf5/build/ diff 
--git a/.gitmodules b/.gitmodules index 7c1f0753..e69de29b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "test/vol-tests"] - path = test/vol-tests - url = https://github.com/HDFGroup/vol-tests diff --git a/build_vol_autotools.sh b/build_vol_autotools.sh index 8ce30bfb..c5e0fc45 100755 --- a/build_vol_autotools.sh +++ b/build_vol_autotools.sh @@ -163,12 +163,6 @@ if [ "$NPROCS" -eq "0" ]; then fi fi -# Ensure that the HDF5 and VOL tests submodules get checked out -if [ -z "$(ls -A ${SCRIPT_DIR}/${HDF5_DIR})" ]; then - git submodule init - git submodule update -fi - # If the user hasn't already, first build HDF5 if [ "$build_hdf5" = true ]; then echo "*****************" diff --git a/build_vol_cmake.sh b/build_vol_cmake.sh index 1655029b..13a519f1 100755 --- a/build_vol_cmake.sh +++ b/build_vol_cmake.sh @@ -175,12 +175,6 @@ if [ "$NPROCS" -eq "0" ]; then fi fi -# Ensure that the vol-tests submodule gets checked out -if [ -z "$(ls -A ${SCRIPT_DIR}/test/vol-tests)" ]; then - git submodule init - git submodule update -fi - # Build the REST VOL connector against HDF5. 
echo "*******************************************" echo "* Building REST VOL connector and test suite *" @@ -206,20 +200,4 @@ fi echo "REST VOL built" -# Clean out the old CMake cache -rm -f "${BUILD_DIR}/CMakeCache.txt" - -# Configure vol-tests - -mkdir -p "${BUILD_DIR}/tests/vol-tests" -cd "${BUILD_DIR}/tests/vol-tests" - -CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_DIR=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}/test/vol-tests" - -echo "Build files generated for vol-tests" - -make || exit 1 - -echo "VOL tests built" - exit 0 diff --git a/src/rest_vol.c b/src/rest_vol.c index dba6f201..a6377254 100644 --- a/src/rest_vol.c +++ b/src/rest_vol.c @@ -2965,7 +2965,7 @@ RV_parse_dataspace(char *space) for (i = 0; i < dims_obj->u.array.len; i++) { if (i > 0) printf(", "); - printf("%llu", space_dims[i]); + printf("%" PRIuHSIZE, space_dims[i]); } printf(" ]\n\n"); if (maxdims_specified) { @@ -2973,7 +2973,7 @@ RV_parse_dataspace(char *space) for (i = 0; i < maxdims_obj->u.array.len; i++) { if (i > 0) printf(", "); - printf("%llu", space_maxdims[i]); + printf("%" PRIuHSIZE, space_maxdims[i]); } printf(" ]\n\n"); } @@ -3164,10 +3164,9 @@ RV_convert_dataspace_shape_to_JSON(hid_t space_id, char **shape_body, char **max strcat(maxdims_out_string_curr_pos++, "0"); } /* end if */ else { - if ((bytes_printed = - snprintf(maxdims_out_string_curr_pos, - maxdims_out_string_new_len - (size_t)maxdims_out_string_curr_pos, - "%s%" PRIuHSIZE, i > 0 ? "," : "", maxdims[i])) < 0) + if ((bytes_printed = snprintf(maxdims_out_string_curr_pos, + maxdims_out_string_new_len - (size_t)buf_ptrdiff, + "%s%" PRIuHSIZE, i > 0 ? 
"," : "", maxdims[i])) < 0) FUNC_GOTO_ERROR(H5E_DATASPACE, H5E_SYSERRSTR, FAIL, "snprintf error"); maxdims_out_string_curr_pos += bytes_printed; } /* end else */ @@ -4125,8 +4124,7 @@ RV_curl_delete(CURL *curl_handle, server_info_t *server_info, const char *reques strcpy(host_header, host_string); - curl_headers_local = curl_slist_append( - curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1)); + curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename)); /* Disable use of Expect: 100 Continue HTTP response */ curl_headers_local = curl_slist_append(curl_headers_local, "Expect:"); @@ -4201,8 +4199,7 @@ RV_curl_put(CURL *curl_handle, server_info_t *server_info, const char *request_e strcpy(host_header, host_string); - curl_headers_local = curl_slist_append( - curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1)); + curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename)); /* Disable use of Expect: 100 Continue HTTP response */ curl_headers_local = curl_slist_append(curl_headers_local, "Expect:"); @@ -4301,8 +4298,7 @@ RV_curl_get(CURL *curl_handle, server_info_t *server_info, const char *request_e strcpy(host_header, host_string); - curl_headers_local = curl_slist_append( - curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1)); + curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename)); /* Specify type of content being sent through cURL */ switch (content_type) { @@ -4387,8 +4383,7 @@ RV_curl_post(CURL *curl_handle, server_info_t *server_info, const char *request_ strcpy(host_header, host_string); - curl_headers_local = curl_slist_append( - curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1)); + curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename)); /* Specify type of 
content being sent through cURL */ switch (content_type) { diff --git a/src/rest_vol_attr.c b/src/rest_vol_attr.c index d612ac24..65e66ba8 100644 --- a/src/rest_vol_attr.c +++ b/src/rest_vol_attr.c @@ -710,7 +710,7 @@ RV_attr_read(void *attr, hid_t dtype_id, void *buf, hid_t dxpl_id, void **req) FUNC_GOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "memory datatype is invalid"); #ifdef RV_CONNECTOR_DEBUG - printf("-> %lld points selected for attribute read\n", file_select_npoints); + printf("-> %" PRIuHSIZE " points selected for attribute read\n", file_select_npoints); printf("-> Attribute's datatype size: %zu\n\n", dtype_size); #endif @@ -827,7 +827,7 @@ RV_attr_write(void *attr, hid_t dtype_id, const void *buf, hid_t dxpl_id, void * FUNC_GOTO_ERROR(H5E_DATASPACE, H5E_BADVALUE, FAIL, "memory datatype is invalid"); #ifdef RV_CONNECTOR_DEBUG - printf("-> %lld points selected for attribute write\n", file_select_npoints); + printf("-> %" PRIuHSIZE " points selected for attribute write\n", file_select_npoints); printf("-> Attribute's datatype size: %zu\n\n", dtype_size); #endif @@ -2525,7 +2525,7 @@ RV_traverse_attr_table(attr_table_entry *attr_table, size_t num_entries, const i #ifdef RV_CONNECTOR_DEBUG printf("-> Attribute %zu name: %s\n", last_idx, attr_table[last_idx].attr_name); printf("-> Attribute %zu creation time: %f\n", last_idx, attr_table[last_idx].crt_time); - printf("-> Attribute %zu data size: %llu\n\n", last_idx, + printf("-> Attribute %zu data size: %" PRIuHSIZE "\n\n", last_idx, attr_table[last_idx].attr_info.data_size); printf("-> Calling supplied callback function\n\n"); @@ -2556,7 +2556,7 @@ RV_traverse_attr_table(attr_table_entry *attr_table, size_t num_entries, const i #ifdef RV_CONNECTOR_DEBUG printf("-> Attribute %zu name: %s\n", last_idx, attr_table[last_idx].attr_name); printf("-> Attribute %zu creation time: %f\n", last_idx, attr_table[last_idx].crt_time); - printf("-> Attribute %zu data size: %llu\n\n", last_idx, + printf("-> Attribute %zu data 
size: %" PRIuHSIZE "\n\n", last_idx, attr_table[last_idx].attr_info.data_size); printf("-> Calling supplied callback function\n\n"); diff --git a/src/rest_vol_dataset.c b/src/rest_vol_dataset.c index b5fc7cbc..3790a507 100644 --- a/src/rest_vol_dataset.c +++ b/src/rest_vol_dataset.c @@ -607,8 +607,8 @@ RV_dataset_read(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spac "memory selection num points != file selection num points"); #ifdef RV_CONNECTOR_DEBUG - printf("-> %lld points selected in file dataspace\n", file_select_npoints); - printf("-> %lld points selected in memory dataspace\n\n", mem_select_npoints); + printf("-> %" PRIuHSIZE " points selected in file dataspace\n", file_select_npoints); + printf("-> %" PRIuHSIZE " points selected in memory dataspace\n\n", mem_select_npoints); #endif /* Setup the host header */ @@ -986,8 +986,8 @@ RV_dataset_write(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spa "memory selection num points != file selection num points"); #ifdef RV_CONNECTOR_DEBUG - printf("-> %lld points selected in file dataspace\n", file_select_npoints); - printf("-> %lld points selected in memory dataspace\n\n", mem_select_npoints); + printf("-> %" PRIuHSIZE " points selected in file dataspace\n", file_select_npoints); + printf("-> %" PRIuHSIZE " points selected in memory dataspace\n\n", mem_select_npoints); #endif if ((file_type_size = H5Tget_size(transfer_info[i].file_type_id)) == 0) @@ -1266,24 +1266,20 @@ RV_dataset_write(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spa #ifdef RV_CONNECTOR_DEBUG printf("-> Base64-encoded data buffer for dataset %zu: %s\n\n", i, - transfer_info[i].u.write_info.base64_encoded_values); + (char *)transfer_info[i].u.write_info.base64_encoded_values); #endif /* Copy encoded values into format string */ write_body_len = (strlen(fmt_string) - 4) + selection_body_len + value_body_len; if (NULL == (transfer_info[i].u.write_info.point_sel_buf = RV_malloc(write_body_len + 1))) 
FUNC_GOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "can't allocate space for write buffer"); - if ((bytes_printed = - snprintf(transfer_info[i].u.write_info.point_sel_buf, write_body_len + 1, fmt_string, - selection_body, transfer_info[i].u.write_info.base64_encoded_values)) < 0) + if ((bytes_printed = snprintf(transfer_info[i].u.write_info.point_sel_buf, write_body_len + 1, + fmt_string, selection_body, + (char *)transfer_info[i].u.write_info.base64_encoded_values)) < 0) FUNC_GOTO_ERROR(H5E_DATASET, H5E_SYSERRSTR, FAIL, "snprintf error"); transfer_info[i].u.write_info.uinfo.buffer = transfer_info[i].u.write_info.point_sel_buf; -#ifdef RV_CONNECTOR_DEBUG - printf("-> Write body: %s\n\n", transfer_info[i].u.write_info.selection_buf); -#endif - if (bytes_printed >= write_body_len + 1) FUNC_GOTO_ERROR(H5E_DATASET, H5E_SYSERRSTR, FAIL, "point selection write buffer exceeded allocated buffer size"); @@ -2509,7 +2505,7 @@ RV_parse_dataset_creation_properties_callback(char *HTTP_response, const void *c for (i = 0; i < YAJL_GET_ARRAY(chunk_dims_obj)->len; i++) { if (i > 0) printf(", "); - printf("%llu", chunk_dims[i]); + printf("%" PRIuHSIZE, chunk_dims[i]); } printf(" ]\n"); #endif @@ -3498,8 +3494,7 @@ RV_convert_dataset_creation_properties_to_JSON(hid_t dcpl, char **creation_prope const char *const external_file_str = "%s{" "\"name\": %s," "\"offset\": " OFF_T_SPECIFIER "," - "\"size\": %llu" - "}"; + "\"size\": %" PRIuHSIZE "}"; /* Check whether the buffer needs to be grown */ bytes_to_print += strlen(external_storage_str); diff --git a/src/rest_vol_datatype.c b/src/rest_vol_datatype.c index b6bbceb6..6ae7cf92 100644 --- a/src/rest_vol_datatype.c +++ b/src/rest_vol_datatype.c @@ -1948,7 +1948,7 @@ RV_convert_JSON_to_datatype(const char *type) for (i = 0; i < YAJL_GET_ARRAY(key_obj)->len; i++) { if (i > 0) printf(", "); - printf("%llu", array_dims[i]); + printf("%" PRIuHSIZE, array_dims[i]); } printf("]\n"); #endif @@ -2170,7 +2170,7 @@ RV_convert_JSON_to_datatype(const 
char *type) memcpy(tmp_vlen_type_buffer + 2, type_class_keys[0], strlen(type_class_keys[0])); #ifdef RV_CONNECTOR_DEBUG - printf("-> Converting variable length datatype's parent type from JSON to hid_t\n", i); + printf("-> Converting variable length datatype's parent type from JSON to hid_t\n"); #endif /* Recursively parse parent datatype from JSON */ diff --git a/src/rest_vol_group.c b/src/rest_vol_group.c index 9c9825a1..6696cca0 100644 --- a/src/rest_vol_group.c +++ b/src/rest_vol_group.c @@ -729,7 +729,7 @@ RV_get_group_info_callback(char *HTTP_response, const void *callback_data_in, vo group_info->nlinks = (hsize_t)YAJL_GET_INTEGER(key_obj); #ifdef RV_CONNECTOR_DEBUG - printf("-> Group had %llu links in it\n\n", group_info->nlinks); + printf("-> Group had %" PRIuHSIZE " links in it\n\n", group_info->nlinks); #endif done: diff --git a/src/rest_vol_link.c b/src/rest_vol_link.c index 88e03f38..b98c555e 100644 --- a/src/rest_vol_link.c +++ b/src/rest_vol_link.c @@ -1574,7 +1574,7 @@ RV_get_link_name_by_idx_callback(char *HTTP_response, const void *callback_data_ FUNC_GOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "link index number larger than number of links"); #ifdef RV_CONNECTOR_DEBUG - printf("-> Retrieving link name of link at index %PRIuHSIZE\n\n", *by_idx_data->idx_p); + printf("-> Retrieving link name of link at index %" PRIuHSIZE "\n\n", *by_idx_data->idx_p); #endif /* Retrieve the nth link name */ diff --git a/src/rest_vol_object.c b/src/rest_vol_object.c index 5c4ea994..0ab5a2b5 100644 --- a/src/rest_vol_object.c +++ b/src/rest_vol_object.c @@ -1127,7 +1127,7 @@ RV_get_object_info_callback(char *HTTP_response, const void *callback_data_in, v obj_info->num_attrs = (hsize_t)YAJL_GET_INTEGER(key_obj); #ifdef RV_CONNECTOR_DEBUG - printf("-> Object had %llu attributes attached to it\n\n", obj_info->num_attrs); + printf("-> Object had %" PRIuHSIZE " attributes attached to it\n\n", obj_info->num_attrs); #endif /* Retrieve the object's class */ diff --git 
a/test/vol-tests b/test/vol-tests deleted file mode 160000 index 996dd872..00000000 --- a/test/vol-tests +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 996dd87212b2547f1ce638d29b64c8ca436d859c