diff --git a/CHANGELOG.md b/CHANGELOG.md
index 74b86b86e7..26742f7bdb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,25 @@
+# NEXT RELEASE
+
+### Enhancements
+* None.
+
+### Fixed
+* A query with a number of predicates ORed together could crash on some platforms (strict weak ordering check failing on iPhone) ([#8028](https://github.com/realm/realm-core/issues/8028), since v14.6.0)
+* None.
+
+### Breaking changes
+* None.
+
+### Compatibility
+* Fileformat: Generates files with format v24. Reads and automatically upgrades from file format v10. If you want to upgrade from an earlier file format version, you will have to use RealmCore v13.x.y or earlier.
+
+-----------
+
+### Internals
+* None.
+
+----------------------------------------------
+
 # 20.0.1 Release notes
 
 ### Enhancements
diff --git a/dependencies.yml b/dependencies.yml
index 71e13577b0..9ccf12002c 100644
--- a/dependencies.yml
+++ b/dependencies.yml
@@ -2,6 +2,6 @@ PACKAGE_NAME: realm-core
 VERSION: 20.0.1
 OPENSSL_VERSION: 3.3.1
 # https://github.com/10gen/baas/commits
-# 2f308db is 2024 July 10
-BAAS_VERSION: 2f308db6f65333728a101d1fecbb792f9659a5ce
+# 04e3f27ad0e is 2024 Sep 8th
+BAAS_VERSION: 04e3f27ad0eb9154bc4e3b631d179d702ac05215
 BAAS_VERSION_TYPE: githash
diff --git a/evergreen/bloaty_to_json.py b/evergreen/bloaty_to_json.py
deleted file mode 100755
index 47b5967d08..0000000000
--- a/evergreen/bloaty_to_json.py
+++ /dev/null
@@ -1,175 +0,0 @@
-#!/usr/bin/env python3
-from __future__ import annotations
-
-import argparse
-import json
-import re
-from csv import DictReader
-from pathlib import Path
-
-parser = argparse.ArgumentParser(description='Checks how bloated realm has become')
-parser.add_argument(
-    '--short-symbols-input',
-    type=Path,
-    help='Path to CSV output of short symbols input file',
-)
-parser.add_argument(
-    '--sections-input',
-    type=Path,
-    help='Path to CSV output of sections input file',
-)
-
-parser.add_argument(
-    '--compileunits-input',
-    type=Path,
-    help='Path to CSV output of compileunits input file',
-)
-
-parser.add_argument(
-    '--analyzed-file',
-    type=str,
-    help='Name of file being analyzed by bloaty',
-)
-
-evgOpts = parser.add_argument_group('Evergreen Metadata')
-evgOpts.add_argument('--output', type=Path, help='The evergreen json output filename')
-evgOpts.add_argument('--project', type=str, help='Evergreen project this script is running in')
-evgOpts.add_argument('--execution', type=int, help='Execution # of this evergreen task')
-evgOpts.add_argument(
-    '--is-patch',
-    type=bool,
-    dest='is_patch',
-    help='Specify if this is not a patch build',
-)
-evgOpts.add_argument(
-    '--build-variant',
-    type=str,
-    dest='build_variant',
-    help='Build variant of the evergreen task',
-)
-evgOpts.add_argument('--branch', type=str, help='Git branch that was being tested')
-evgOpts.add_argument('--revision', type=str, help='Git sha being tested')
-evgOpts.add_argument('--task-id', type=str, dest='task_id', help='Evergreen task ID of this task')
-evgOpts.add_argument('--task-name', type=str, dest='task_name', help='Name of this evergreen task')
-evgOpts.add_argument(
-    '--revision-order-id',
-    type=str,
-    dest='revision_order_id',
-    help='Evergreen revision order id',
-)
-evgOpts.add_argument('--version-id', type=str, dest='version_id', help='Name of this evergreen version')
-
-args = parser.parse_args()
-patch_username : str = ''
-
-def parse_patch_order():
-    global patch_username
-    patch_order_re = re.compile(r"(?P<patch_username>[\w\@\.]+)_(?P<patch_order>\d+)")
-    match_obj = patch_order_re.match(args.revision_order_id)
-    patch_username = match_obj.group('patch_username')
-    return int(match_obj.group('patch_order'))
-evg_order = int(args.revision_order_id) if not args.is_patch else parse_patch_order()
-
-cxx_method_re = re.compile(
-    # namespaces/parent class name
-    r"(?P<ns>(?:(?:[_a-zA-Z][\w]*)(?:<.*>)?(?:::)|(?:\(anonymous namespace\)::))+)" +
-    r"(?P<name>[\~a-zA-Z_][\w]*)(?:<.*>)?" + # function/class name
-    r"(?P<is_function>\(\))?" + # if this is function, this will capture "()"
-    # will be a number if this is a lambda
-    r"(?:::\{lambda\(\)\#(?P<lambda_number>\d+)\}::)?")
-
-elf_section_re = re.compile(r"\[section \.(?P<section_name>[\w\.\-]+)\]")
-
-items : list[dict] = []
-sections_seen = set()
-if args.short_symbols_input:
-    with open(args.short_symbols_input, 'r') as csv_file:
-        input_csv_reader = DictReader(csv_file)
-        for row in input_csv_reader:
-            raw_name = row['shortsymbols']
-            if match := cxx_method_re.search(raw_name):
-                ns = match.group('ns').rstrip(':')
-
-                node_name = match.group('name')
-                if match.group('lambda_number'):
-                    node_name = "{} lambda #{}".format(node_name, match.group('lambda_number'))
-
-                type_str: str = 'symbol'
-                if match.group('lambda_number'):
-                    type_str = 'lambda'
-                elif match.group('is_function'):
-                    type_str = 'function'
-
-                items.append({
-                    'type': type_str,
-                    'name': raw_name,
-                    'ns': ns,
-                    'file_size': int(row['filesize']),
-                    'vm_size': int(row['vmsize']),
-                })
-
-            elif match := elf_section_re.search(raw_name):
-                section_name = match.group('section_name')
-                type_str: str = 'section' if not section_name.startswith('.debug') else 'debug_section'
-                if section_name not in sections_seen:
-                    items.append({
-                        'type': type_str,
-                        'name': section_name,
-                        'file_size': int(row['filesize']),
-                        'vm_size': int(row['vmsize'])
-                    })
-            else:
-                items.append({
-                    'type': 'symbol',
-                    'name': raw_name,
-                    'file_size': int(row['filesize']),
-                    'vm_size': int(row['vmsize']),
-                })
-
-if args.sections_input:
-    with open(args.sections_input, 'r') as csv_file:
-        input_csv_reader = DictReader(csv_file)
-
-        for row in input_csv_reader:
-            section_name = row['sections']
-            type_str: str = 'section' if not section_name.startswith('.debug') else 'debug_section'
-            if section_name not in sections_seen:
-                items.append({
-                    'name': section_name,
-                    'type': type_str,
-                    'file_size': int(row['filesize']),
-                    'vm_size': int(row['vmsize'])
-                })
-
-if args.sections_input:
-    with open(args.compileunits_input, 'r') as csv_file:
-        input_csv_reader = DictReader(csv_file)
-
-        for row in input_csv_reader:
-            compileunit_name = row['compileunits']
-            if not elf_section_re.search(compileunit_name):
-                items.append({
-                    'name': compileunit_name,
-                    'type': 'compileunit',
-                    'file_size': int(row['filesize']),
-                    'vm_size': int(row['vmsize'])
-                })
-
-output_obj = {
-    'items': items,
-    'execution': args.execution,
-    'is_mainline': (args.is_patch is not True),
-    'analyzed_file': args.analyzed_file,
-    'order': evg_order,
-    'project': args.project,
-    'branch': args.branch,
-    'build_variant': args.build_variant,
-    'revision': args.revision,
-    'task_id': args.task_id,
-    'task_name': args.task_name,
-    'version_id': args.version_id,
-    'patch_username': patch_username
-}
-
-with open(args.output, 'w') as out_fp:
-    json.dump(output_obj, out_fp)
diff --git a/evergreen/config.yml b/evergreen/config.yml
index b733788309..d5ddc5d1fc 100644
--- a/evergreen/config.yml
+++ b/evergreen/config.yml
@@ -223,33 +223,6 @@ functions:
           make "-j$NPROC" 2>&1
           popd > /dev/null # realm-core
 
-  "run benchmark":
-    - command: shell.exec
-      params:
-        working_dir: realm-core
- shell: bash - script: |- - set -o errexit - - if [[ -z "${benchmark_name}" ]]; then - echo "No benchmark specified." - exit 1 - fi - - export UNITTEST_THREADS=1 - - BENCHMARK=$(./evergreen/abspath.sh ./build/test/benchmark-${benchmark_name}/${cmake_build_type|Debug}/realm-benchmark-${benchmark_name}) - echo "Going to run benchmark $BENCHMARK" - - [[ -d benchmark_results ]] && rm -rf benchmark_results - mkdir benchmark_results - cd benchmark_results - - $BENCHMARK "$(pwd)/" - - command: perf.send - params: - file: './realm-core/benchmark_results/results.latest.json' - "run tests": - command: expansions.update params: @@ -351,52 +324,6 @@ functions: params: file_location: realm-core/${task_name}_results.json - "upload fuzzer results": - - command: shell.exec - params: - working_dir: realm-core/build/test/realm-fuzzer - script: |- - if ls crash-*> /dev/null 2>&1; then - echo "Found crash file" - #Rename the crash file and the realm file. - #If there is a crash, this will signal that something needs to be uploaded. - mv crash-* realm-fuzzer-crash.txt - mv fuzz-realm.realm fuzzer_realm.realm - fi - - - command: s3.put - params: - working_dir: realm-core/build/test/realm-fuzzer - aws_key: '${artifacts_aws_access_key}' - aws_secret: '${artifacts_aws_secret_key}' - local_file: 'realm-core/build/test/realm-fuzzer/realm-fuzzer-crash.txt' - remote_file: '${project}/${branch_name}/${task_id}/${execution}/realm-fuzzer-crash.txt' - bucket: mciuploads - permissions: public-read - content_type: text/plain - display_name: Crash input file - optional: true - - - command: s3.put - params: - working_dir: realm-core/build/test/realm-fuzzer - aws_key: '${artifacts_aws_access_key}' - aws_secret: '${artifacts_aws_secret_key}' - local_file: 'realm-core/build/test/realm-fuzzer/fuzzer_realm.realm' - remote_file: '${project}/${branch_name}/${task_id}/${execution}/fuzzer_realm.realm' - bucket: mciuploads - permissions: public-read - content_type: application/x-binary - display_name: Realm File - optional: true - - - command: shell.exec - params: - working_dir: realm-core/build/test/realm-fuzzer - script: |- - #remove the crash file and the realm produced by the fuzzer run - rm realm-fuzzer-crash.txt - rm fuzzer_realm.realm "run hang analyzer": - command: shell.exec @@ -674,22 +601,6 @@ tasks: test_filter: CoreTests test_executable_name: "realm-tests" -- name: benchmark-common-tasks - exec_timeout_secs: 1800 - tags: [ "benchmark" ] - commands: - - func: "run benchmark" - vars: - benchmark_name: common-tasks - -- name: benchmark-crud - exec_timeout_secs: 1800 - tags: [ "benchmark" ] - commands: - - func: "run benchmark" - vars: - benchmark_name: crud - # These are local object store tests; baas is not started, however some use the sync server - name: object-store-tests tags: [ "for_pull_requests", "test_suite" ] @@ -851,16 +762,6 @@ tasks: echo "COMMAND: git clang-format -f --commit $format_ref" exit 1 -- name: fuzzer - tags: [ "for_nightly_tests" ] - allowed_requesters: [ "ad_hoc", "patch" ] - commands: - - command: shell.exec - params: - working_dir: realm-core/build/test/realm-fuzzer - shell: /bin/bash - script: |- - ${cmake_build_type|Debug}/realm-libfuzz -rss_limit_mb=0 -max_total_time=3600 - name: combined-tests-ios-simulator tags: [ "for_pull_requests" ] @@ -898,7 +799,6 @@ tasks: 'io.realm.CombinedTests' \ $TEST_RESULTS_FILE - task_groups: - name: core_tests_group setup_group_can_fail_task: true @@ -983,18 +883,6 @@ task_groups: - .test_suite - process_coverage_data -- name: benchmarks - 
setup_group_can_fail_task: true - setup_group: - - func: "fetch source" - - func: "fetch binaries" - - func: "compile" - vars: - target_to_build: "benchmarks" - timeout: - - func: "run hang analyzer" - tasks: - - .benchmark - name: long-running-tests setup_group_can_fail_task: true @@ -1012,18 +900,6 @@ task_groups: tasks: - long-running-core-tests -- name: fuzzer-tests - setup_group_can_fail_task: true - setup_group: - - func: "fetch source" - - func: "fetch binaries" - - func: "compile" - vars: - target_to_build: realm-libfuzz - teardown_task: - - func: "upload fuzzer results" - tasks: - - fuzzer - name: ios-simulator-tests max_hosts: 1 @@ -1149,38 +1025,6 @@ buildvariants: tasks: - name: core_tests_group -# TODO RCORE-2085 ubuntu2004-release/ubuntu2004-arm64 build variants are here until we've established -# a new baseline for the updated ubuntu 2204/clang 18 builders and to generate artifacts for the baas -# team. - -- name: ubuntu2004-arm64 - display_name: "Ubuntu 20.04 ARM64 (GCC 9 release benchmarks)" - run_on: ubuntu2004-arm64-large - expansions: - cmake_url: "https://s3.amazonaws.com/static.realm.io/evergreen-assets/cmake-3.26.3-linux-aarch64.tar.gz" - cmake_bindir: "./cmake_binaries/bin" - python3: "/opt/mongodbtoolchain/v3/bin/python3" - use_system_openssl: On - fetch_missing_dependencies: On - cmake_build_type: RelWithDebInfo - tasks: - - name: benchmarks - -- name: ubuntu-x86_64-benchmarks - display_name: "Ubuntu x86_64 benchmarks" - run_on: ubuntu2204-large - expansions: - fetch_missing_dependencies: On - cmake_url: "https://s3.amazonaws.com/static.realm.io/evergreen-assets/cmake-3.26.3-linux-x86_64.tar.gz" - cmake_bindir: "./cmake_binaries/bin" - clang_url: "https://s3.amazonaws.com/static.realm.io/evergreen-assets/clang%2Bllvm-18.1.2-x86_64-linux-gnu.tar.xz" - clang_sha256_sum: "feab4b0f5fba325bfe0f4245710fd19fd74f813f44b5e81eda794f4f75bca343" - c_compiler: "./clang_binaries/bin/clang" - cxx_compiler: "./clang_binaries/bin/clang++" - cmake_build_type: RelWithDebInfo - tasks: - - name: benchmarks - - name: ubuntu-release display_name: "Ubuntu (Release build)" run_on: ubuntu2204-arm64-large @@ -1192,7 +1036,6 @@ buildvariants: python3: /opt/mongodbtoolchain/v3/bin/python3 tasks: - name: compile_test_and_package - - name: benchmarks - name: long-running-tests - name: ubuntu-asan @@ -1248,20 +1091,6 @@ buildvariants: tasks: - name: compile_test -- name: ubuntu-fuzzer - display_name: "Ubuntu (Fuzzer)" - run_on: ubuntu2204-arm64-large - expansions: - fetch_missing_dependencies: On - enable_ubsan: On - c_compiler: "/opt/clang+llvm/bin/clang" - cxx_compiler: "/opt/clang+llvm/bin/clang++" - cmake_build_type: RelWithDebInfo - run_with_encryption: 1 - enable_fuzzer: On - tasks: - - name: fuzzer-tests - - name: ubuntu-emscripten display_name: "Ubuntu (Emscripten x86_64)" run_on: ubuntu2204-large @@ -1342,35 +1171,19 @@ buildvariants: - name: core_tests_group - name: macos-release - display_name: "MacOS 11.0 x86_64 (Release build)" - run_on: macos-1100 + display_name: "MacOS 13.0 arm64 (Release build)" + run_on: macos-13-arm64 expansions: cmake_url: "https://s3.amazonaws.com/static.realm.io/evergreen-assets/cmake-3.26.3-macos-universal.tar.gz" cmake_bindir: "./cmake_binaries/CMake.app/Contents/bin" cmake_generator: Xcode max_jobs: $(sysctl -n hw.logicalcpu) cmake_build_type: Release - xcode_developer_dir: /Applications/Xcode13.1.app/Contents/Developer + xcode_developer_dir: /Applications/Xcode.app/Contents/Developer extra_flags: -DREALM_ENABLE_ASSERTIONS=ON tasks: - - name: benchmarks - 
- name: compile_test - name: test-on-exfat -- name: macos-1100-arm64-release - display_name: "MacOS 11 arm64 (Release benchmarks)" - run_on: macos-1100-arm64 - expansions: - cmake_url: "https://s3.amazonaws.com/static.realm.io/evergreen-assets/cmake-3.26.3-macos-universal.tar.gz" - cmake_bindir: "./cmake_binaries/CMake.app/Contents/bin" - cmake_generator: Xcode - max_jobs: $(sysctl -n hw.logicalcpu) - cmake_build_type: Release - xcode_developer_dir: /Applications/Xcode13.1.app/Contents/Developer - extra_flags: -DREALM_ENABLE_ASSERTIONS=ON - tasks: - - name: benchmarks - - name: macos display_name: "MacOS 14 arm64" run_on: macos-14-arm64 @@ -1401,8 +1214,6 @@ buildvariants: extra_flags: -DCMAKE_SYSTEM_NAME=Darwin -DCMAKE_OSX_ARCHITECTURES=arm64 tasks: - name: compile_test_and_package - # benchmarks are disabled for now because of perf problems on AWS macos instances. - # - name: benchmarks - name: long-running-tests - name: swift-build-and-test @@ -1601,9 +1412,8 @@ buildvariants: expansions: cmake_bindir: "/cygdrive/c/Program Files/CMake/bin/" cmake_generator: "Visual Studio 16 2019" - extra_flags: "-A x64" cmake_build_type: "Debug" - extra_flags: -DCMAKE_SYSTEM_NAME=WindowsStore -DCMAKE_SYSTEM_VERSION=10.0 + extra_flags: -A x64 -DCMAKE_SYSTEM_NAME=WindowsStore -DCMAKE_SYSTEM_VERSION=10.0 max_jobs: $(($(grep -c proc /proc/cpuinfo) / 2)) fetch_missing_dependencies: On python3: "/cygdrive/c/python/python37/python.exe" diff --git a/evergreen/proxy-network-faults.toxics b/evergreen/proxy-network-faults.toxics deleted file mode 100644 index 8168089606..0000000000 --- a/evergreen/proxy-network-faults.toxics +++ /dev/null @@ -1,4 +0,0 @@ -{"name": "limit-data-upstream","type": "limit_data","stream": "upstream","toxicity": 0.6,"attributes": {"bytes": %RANDOM1%}} -{"name": "limit-data-downstream","type": "limit_data","stream": "downstream","toxicity": 0.6,"attributes": {"bytes": %RANDOM2%}} -{"name": "slow-close-upstream","type": "slow_close","stream": "upstream","toxicity": 0.6,"attributes": {"delay": %RANDOM3%}} -{"name": "reset-peer-upstream","type": "reset_peer","stream": "upstream","toxicity": 0.6,"attributes": {"timeout": %RANDOM4%}} diff --git a/evergreen/proxy-nonideal-transfer.toxics b/evergreen/proxy-nonideal-transfer.toxics deleted file mode 100644 index c0ff2d4b9b..0000000000 --- a/evergreen/proxy-nonideal-transfer.toxics +++ /dev/null @@ -1,5 +0,0 @@ -{"name": "latency-upstream","type": "latency","stream": "upstream","toxicity": 0.5,"attributes": {"latency": 250,"jitter": 250}} -{"name": "latency-downstream","type": "latency","stream": "downstream","toxicity": 0.5,"attributes": {"latency": 0,"jitter": 250}} -{"name": "bandwidth-upstream","type": "bandwidth","stream": "upstream","toxicity": 0.5,"attributes": {"rate": %RANDOM1%}} -{"name": "bandwidth-downstream","type": "bandwidth","stream": "downstream","toxicity": 0.5,"attributes": {"rate": %RANDOM2%}} -{"name": "slicer-downstream","type": "slicer","stream": "downstream","toxicity": 0.5,"attributes": {"average_size": 500,"size_variation": 250,"delay": 10000}} diff --git a/src/realm/query_engine.hpp b/src/realm/query_engine.hpp index 39d39b43a5..573405b03e 100644 --- a/src/realm/query_engine.hpp +++ b/src/realm/query_engine.hpp @@ -2268,7 +2268,7 @@ class OrNode : public ParentNode { std::type_index m_type; bool operator<(const ConditionType& other) const { - return this->m_col < other.m_col && this->m_type < other.m_type; + return (this->m_col == other.m_col) ? 
this->m_type < other.m_type : this->m_col < other.m_col; } bool operator!=(const ConditionType& other) const { diff --git a/test/object-store/object.cpp b/test/object-store/object.cpp index 4ef8cf5148..910492589a 100644 --- a/test/object-store/object.cpp +++ b/test/object-store/object.cpp @@ -574,6 +574,16 @@ TEST_CASE("object") { }); } + SECTION("modifying origin table 'table2', property 'value' " + "while observing related table 'table', property 'origin' " + "-> does NOT send a notification") { + auto token = require_no_change(object_target, kpa_target_backlink); + + write([&] { + object_origin.set_column_value("value", 105); + }); + } + SECTION("modifying related table 'table', property 'value 1' " "while observing related table 'table', property 'value 1' " "-> DOES send a notification") {
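
The `query_engine.hpp` hunk above is the code change behind the #8028 CHANGELOG entry: `OrNode::ConditionType::operator<` used to return `m_col < other.m_col && m_type < other.m_type`, which is not a strict weak ordering, so sorting the OR conditions is undefined behaviour and can crash outright or fail the strict weak ordering check mentioned in the CHANGELOG. The patched version orders by column first and breaks ties on the type, a valid lexicographic ordering. The sketch below is illustrative only — `Cond`, `old_less`, and `new_less` are made-up names, and plain integers stand in for Realm's column key and `std::type_index` — but it reproduces the failure mode and shows an equivalent `std::tie` formulation of the fix.

```cpp
// Minimal sketch (assumed names, simplified member types) of the comparator
// bug in OrNode::ConditionType and of the lexicographic replacement.
#include <algorithm>
#include <cassert>
#include <tuple>
#include <vector>

struct Cond {
    int m_col;  // stands in for the condition's column key
    int m_type; // stands in for the condition's std::type_index
};

// Old comparator: both fields must be smaller. Not a strict weak ordering:
// with a={0,5}, b={3,2}, c={1,8}, a and b are mutually "not less", b and c
// are mutually "not less", yet a < c -- equivalence is not transitive, so
// std::sort may misbehave and strict-weak-ordering checks abort.
bool old_less(const Cond& a, const Cond& b)
{
    return a.m_col < b.m_col && a.m_type < b.m_type;
}

// Fixed comparator: column first, type only as a tie-breaker. std::tie
// expresses the same lexicographic order as the ternary in the patch.
bool new_less(const Cond& a, const Cond& b)
{
    return std::tie(a.m_col, a.m_type) < std::tie(b.m_col, b.m_type);
}

int main()
{
    Cond a{0, 5}, b{3, 2}, c{1, 8};
    assert(!old_less(a, b) && !old_less(b, a)); // a equivalent to b
    assert(!old_less(b, c) && !old_less(c, b)); // b equivalent to c
    assert(old_less(a, c));                     // ...yet a < c: ordering is invalid

    std::vector<Cond> conds{c, a, b};
    std::sort(conds.begin(), conds.end(), new_less); // well-defined with the fix
    assert(std::is_sorted(conds.begin(), conds.end(), new_less));
    return 0;
}
```

The ternary in the actual patch, `(m_col == other.m_col) ? m_type < other.m_type : m_col < other.m_col`, encodes the same column-first ordering; `std::tie` is simply a compact way to write it when the members are directly comparable.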